def parse_request(self, message):
    """Parse a raw HTTP request and record its fields on this instance.

    Sets ``self.method``, ``self.path``, ``self.headers`` and
    ``self.status``.  Only GET is implemented (anything else -> 501);
    an unparseable method -> 400; a missing path -> 501.  A directory
    path ('/' or anything ending in '/') is mapped to its index.html.
    """
    # Prefer the C parser, fall back to the pure-python implementation.
    try:
        from http_parser.parser import HttpParser
    except ImportError:
        from http_parser.pyparser import HttpParser

    p = HttpParser()
    p.execute(message, len(message))

    self.method = p.get_method()
    self.path = p.get_path()
    self.headers = p.get_headers()

    if not self.method:
        # Parser could not extract a request method at all.
        self.status = 400
    elif self.method == 'GET':
        self.status = 200
    else:
        # TODO: maybe support HEAD eventually for a download accelerator.
        self.status = 501

    # The None check must come BEFORE any string operation on the path:
    # the original called .endswith() first, which would raise on None.
    if self.path is None:
        self.status = 501
    elif self.path == '/':
        self.path = '/index.html'
    elif self.path.endswith('/'):
        self.path += 'index.html'
def parse_request(http_request, protocol, host, port): """ Parse HTTP request form Burp Suite to dict TODO cookie parse """ httpParser = HttpParser() httpParser.execute(http_request, len(http_request)) header = dict(httpParser.get_headers()) header.pop("Content-Length") # remove Content-Length # cookie = header["Cookie"] body = httpParser.recv_body() method = httpParser.get_method() url = protocol + "://" + host + httpParser.get_path() query = httpParser.get_query_string() params = dict(urlparse.parse_qsl(query)) data = dict(urlparse.parse_qsl(body)) if method == "POST" else {} try: jsondata = json.loads( body) if method == "POST" and header["Content-Type"] == "application/json" else {} except Exception as e: print "[!] " + e jsondata = {} return method, url, header, params, data, jsondata
class Request:
    """Incrementally parsed HTTP request with lazy body decoding."""

    parser = None
    _body = None  # cache for the decoded body, filled on first access

    def __init__(self, data):
        parser = HttpParser()
        parser.execute(data, len(data))
        self.parser = parser
        self.method = parser.get_method()
        self.headers = parser.get_headers()
        path = parser.get_path()
        self.querystring = parse_qs(unquote(parser.get_query_string()),
                                    keep_blank_values=True)
        # Re-attach the raw query string so path reflects the full target.
        if self.querystring:
            path += "?{}".format(parser.get_query_string())
        self.path = path

    def add_data(self, data):
        """Feed a further chunk of the request to the parser."""
        self.parser.execute(data, len(data))

    @property
    def body(self):
        """Decoded request body, drained from the parser once."""
        if self._body is None:
            self._body = decode_from_bytes(self.parser.recv_body())
        return self._body

    def __str__(self):
        return "{} - {} - {}".format(self.method, self.path, self.headers)
async def handle_url(self, request: HttpParser) -> HttpResponse:
    """Serve a static file below ``self.root_dir`` for a GET/HEAD request,
    with minimal single-range support.

    Returns 405 for other methods, 400 for a malformed Range header,
    404 when the file is missing and 403 when it is unreadable.
    """
    method = request.get_method().upper()
    if method not in ('GET', 'HEAD'):
        return HttpResponse(405)

    path = request.get_path()
    if path.endswith('/'):
        path += 'index.html'

    # Map the URL path onto the document root.
    relative = os.path.relpath(url2pathname(path), '/')
    filename = os.path.join(self.root_dir, relative)

    try:
        byte_range = None
        if 'Range' in request.get_headers():
            # Not RFC 7233 compliant: only one fully-specified
            # "bytes=start-end" range is accepted.
            range_match = re.match(r'bytes=(\d+)-(\d+)',
                                   request.get_headers()['Range'])
            if not range_match:
                return HttpResponse(400, 'Invalid Range header')
            start, end = map(int, range_match.groups())
            # Python range is exclusive, HTTP Range is inclusive
            byte_range = range(start, end + 1)

        length = 0
        async with aiofiles.open(filename, 'rb') as f:
            if method == 'GET':
                if byte_range is not None:
                    await f.seek(byte_range.start)
                    data = await f.read(len(byte_range))
                    # Shrink the range if the file ended early.
                    byte_range = range(byte_range.start,
                                       byte_range.start + len(data))
                    await f.seek(0, os.SEEK_END)
                    length = await f.tell()
                    response = HttpResponse(206, data)
                else:
                    data = await f.read()
                    # BUG FIX: length previously stayed 0 here, so plain
                    # GETs advertised "Content-Length: 0".
                    length = len(data)
                    response = HttpResponse(200, data)
            else:
                # Used instead of os.stat to ensure the file can be accessed
                response = HttpResponse(200)
                await f.seek(0, os.SEEK_END)
                length = await f.tell()
                if byte_range is not None:
                    byte_range = range(byte_range.start,
                                       min(length, byte_range.stop))

        if byte_range is not None:
            # BUG FIX: a ranged response must advertise the size of the
            # range it carries; the total size belongs in Content-Range
            # (RFC 7233).  The original sent the full file size here.
            response.headers['Content-Length'] = len(byte_range)
            response.headers['Content-Range'] = 'bytes %d-%d/%d' % (
                byte_range.start, byte_range.stop - 1, length)
        else:
            response.headers['Content-Length'] = length
    except FileNotFoundError:
        return HttpResponse(404, 'This is not the file you are looking for')
    except PermissionError:
        return HttpResponse(403)

    _, extension = os.path.splitext(filename)
    extension = extension[1:]
    if extension.lower() in self.mime_types:
        response.headers['Content-Type'] = self.mime_types[extension.lower()]
    response.headers['Last-Modified'] = formatdate(
        os.stat(filename).st_mtime, False, True)
    return response
def __init__(self, raw):
    """Parse the raw request held by *raw* into convenient attributes."""
    self.raw = raw
    parser = HttpParser()
    parser.execute(raw.request, len(raw.request))
    self.headers = parser.get_headers()
    # NOTE(review): reads the parser's private chunk list; recv_body()
    # would drain the buffer instead -- confirm this was intentional.
    self.body = b"".join(parser._body)
    self.url = parser.get_url()
    self.path = parser.get_path()
    self.method = parser.get_method()
    self.arguments = parser.get_query_string()
    # Non-empty path segments, e.g. "/a/b/" -> ["a", "b"].
    self.slug = [segment for segment in self.path.split('/') if segment != '']
def proxy(data):
    """
    the function called by tproxy to determine where to send traffic

    tproxy will call this function repeatedly for the same connection, as
    we receive more incoming data, until we return something other than
    None.  typically our response tells tproxy where to proxy the
    connection to, but may also tell it to hang up, or respond with some
    error message.
    """
    log = logging.getLogger("proxy")

    parser = HttpParser()
    received = len(data)
    if parser.execute(data, received) != received:
        return {'close': 'HTTP/1.0 400 Bad Request\r\n\r\nParse error'}

    if not parser.is_headers_complete():
        # Not enough data yet: either wait for more or give up when the
        # client has already sent an unreasonable amount of header bytes.
        if received > MAX_HEADER_LENGTH:
            return {
                'close': 'HTTP/1.0 400 Bad Request\r\n'
                '\r\nHeaders are too large'
            }
        return None

    # the hostname may be in the form of hostname:port, in which case we want
    # to discard the port, and route just on hostname
    route_host = parser.get_headers().get('HOST', None)
    if route_host:
        match = _HOST_PORT_REGEXP.match(route_host)
        if match:
            route_host = match.group(1)

    try:
        log.debug("Routing %r" % (parser.get_url(),))
        return _ROUTER.route(route_host,
                             parser.get_method(),
                             parser.get_path(),
                             parser.get_query_string())
    except Exception:
        log.error("error routing %r, %s" % (
            parser.get_url(),
            traceback.format_exc(),
        ))
        gevent.sleep(ERROR_DELAY)
        return {
            'close': 'HTTP/1.0 502 Gateway Error\r\n'
            '\r\nError routing request'
        }
def proxy(data):
    """
    the function called by tproxy to determine where to send traffic

    tproxy will call this function repeatedly for the same connection, as
    we receive more incoming data, until we return something other than
    None.  typically our response tells tproxy where to proxy the
    connection to, but may also tell it to hang up, or respond with some
    error message.
    """
    log = logging.getLogger("proxy")

    parser = HttpParser()
    nbytes = len(data)
    if parser.execute(data, nbytes) != nbytes:
        return {'close': 'HTTP/1.0 400 Bad Request\r\n\r\nParse error'}

    if not parser.is_headers_complete():
        # Ask tproxy for more data unless the headers are already huge.
        if nbytes <= MAX_HEADER_LENGTH:
            return None
        return {
            'close': 'HTTP/1.0 400 Bad Request\r\n'
            '\r\nHeaders are too large'
        }

    headers = parser.get_headers()

    # the hostname may be in the form of hostname:port, in which case we want
    # to discard the port, and route just on hostname
    route_host = headers.get('HOST', None)
    if route_host:
        match = _HOST_PORT_REGEXP.match(route_host)
        if match:
            route_host = match.group(1)

    try:
        log.debug("Routing %r" % (parser.get_url(), ))
        return _ROUTER.route(route_host, parser.get_method(),
                             parser.get_path(), parser.get_query_string())
    except Exception:
        log.error("error routing %r, %s" % (
            parser.get_url(),
            traceback.format_exc(),
        ))
        gevent.sleep(ERROR_DELAY)
        return {
            'close': 'HTTP/1.0 502 Gateway Error\r\n'
            '\r\nError routing request'
        }
def process(indir, outdir): findstr = os.path.join(indir, '*') for fn in glob.glob(findstr): print fn with open(fn, 'rb') as f: http_bin = f.read() n = 0 while n < len(http_bin): http = HttpParser() nparsed = http.execute(http_bin[n:], len(http_bin) - n) if not http.is_message_complete(): break if http.get_path() != '': # send http_method = http_bin[n:].split()[ 0] #http.get_method() -- seems bugged http_path = http_bin[n:].split()[1] http_request = parse_http_packet(http.get_headers(), http.recv_body()) http_hostname = 'unknown' if 'Host' in http.get_headers(): http_hostname = http.get_headers()['Host'] print http_hostname nparsed -= 1 full_http = http_method + ' ' + http_path + '\n' full_http += http_request + '\n' save_http_packet(outdir, os.path.basename(fn), http_hostname, http_path, 'send', full_http) else: # recv http_status = http.get_status_code() http_reply = parse_http_packet(http.get_headers(), http.recv_body()) full_http += str(http_status) + '\n' full_http += http_reply save_http_packet(outdir, os.path.basename(fn), http_hostname, '', 'recv', full_http) n += nparsed
def handleData(self, fd):
    """Parse the buffered request for *fd* and send back a response."""
    self.debug("Entering handleData")
    cached = self.con_cache[fd]
    # Wait until at least the header terminator has arrived.
    if '\r\n\r\n' not in cached:
        self.debug("Partial message - Exiting handleData")
        return

    parser = HttpParser()
    parser.execute(cached, len(cached))
    method = parser.get_method()
    path = parser.get_path()
    headers = parser.get_headers()
    debugStr = "\nMethod: %s\nPath: %s\nHeaders: %s\n" % (method, path, headers)
    #self.debug(debugStr)

    rangeRequest = headers.get('Range')
    if rangeRequest is not None:
        self.debug("Range: %s" % (rangeRequest))

    isValid = False
    if method not in ('GET', 'HEAD', 'PUT', 'DELETE', 'POST'):
        response = self.makeError('400', 'Bad Request')
    elif method not in ('GET', 'HEAD'):
        response = self.makeError('501', 'Not Implemented')
    else:
        if path == '/':
            path = '/index.html'
        path = self.hosts['default'] + path
        isValid, response = self.makeResponse(path, rangeRequest)

    self.clients[fd].send(response)
    self.debug("PATH:%s" % (path))

    # Only GET carries a body; ranged requests stream a slice of the file.
    if isValid and method != "HEAD":
        if rangeRequest:
            start, end = self.getByteRange(rangeRequest)
            self.send(path, fd, start, end)
        else:
            self.sendAll(path, fd)
    self.debug("Exiting handleData")
def proxy(data):
    """tproxy hook serving files under ROOT directly for /static URLs.

    Returns a tproxy action dict: a file descriptor to splice to the
    client after a 200 header, or an instruction to close the connection.
    """
    parser = HttpParser(0)
    parser.execute(data, len(data))

    path = parser.get_path()
    if path.startswith('/static'):
        # SECURITY FIX: the URL path is attacker-controlled; resolve it
        # and refuse anything (e.g. /static/../../etc/passwd) that
        # escapes the document root.
        root = os.path.realpath(ROOT)
        fullpath = os.path.realpath(os.path.join(ROOT, path[1:]))
        inside_root = fullpath == root or \
            fullpath.startswith(root + os.sep)
        if inside_root and os.path.exists(fullpath):
            fno = os.open(fullpath, os.O_RDONLY)
            return {
                "file": fno,
                "reply": "HTTP/1.1 200 OK\r\n\r\n"
            }
        return {"close": True}

    return {"close": True}
def parseData(self, data, fd): p = HttpParser() nparsed = p.execute(data,len(data)) resp = Response() if self.debugging: print p.get_method(),p.get_path(),p.get_headers() if (p.get_method() == 'GET'): resp = self.handleGet(p, resp) elif (p.get_method() == 'DELETE'): resp.setCode(501) else: resp.setCode(400) self.clients[fd].send(str(resp)) try: self.clients[fd].send(resp.body) except: pass
class HttpRequest(object):
    """Thin wrapper around HttpParser exposing CGI-style request metadata."""

    __cgi_config = None  # cached CGI environ dict, built on first access

    def __init__(self, request_text, server_config):
        self.__parser = HttpParser()
        self.__parser.execute(request_text, len(request_text))
        self.__server_config = server_config

    def get_body(self):
        """Return the body parsed so far, or None when none has arrived."""
        if self.__parser.is_partial_body():
            return self.__parser.recv_body()
        return None

    def get_headers(self):
        return self.__parser.get_headers()

    def get_request_method(self):
        return self.__parser.get_method()

    def get_request_path(self):
        return self.__parser.get_path()

    def get_cgi_config(self):
        """Build (once) and return the CGI environment for this request."""
        if self.__cgi_config is None:
            config = {}
            #WSGI required variable
            #config['wsgi.input'] = io.StringIO(self.get_body())
            #CGI
            config['SERVER_NAME'] = self.__server_config['server_name']
            config['SERVER_PORT'] = self.__server_config['server_port']
            # BUG FIX: the original wrote
            #   __cgi_config['SERVER_PROTOCOL']: 'HTTP/1.1'
            # which is an annotation statement, not an assignment -- the
            # key was never set.
            config['SERVER_PROTOCOL'] = 'HTTP/1.1'
            config['REQUEST_METHOD'] = self.get_request_method()
            config['PATH_INFO'] = self.get_request_path()
            for header, value in self.get_headers().items():
                config[f'HTTP_{header}'] = value
            self.__cgi_config = config
        return self.__cgi_config
async def run(self):
    """Read one HTTP request from the socket and stream a monitor's
    matroska output back to the client."""
    parser = HttpParser(kind=0)
    # Accumulate request bytes until the parser sees a full message.
    while not parser.is_message_complete():
        chunk = await self._loop.sock_recv(self._sock, 1024)
        if not chunk:
            break
        if parser.execute(chunk, len(chunk)) != len(chunk):
            break

    valid = parser.is_message_complete() and \
        parser.get_method() in ("GET", "HEAD")
    if not valid:
        reply = (b"HTTP/1.1 400 Bad Request\r\n"
                 b"Content-Type: text/plain\r\n"
                 b"\r\n"
                 b"Bad Request\n")
        await self._loop.sock_sendall(self._sock, reply)
        await self.close()
        return

    channel = parser.get_path().strip("/")
    monitor = self._server.get_monitor(channel)
    if monitor is None:
        reply = (b"HTTP/1.1 404 Not Found\r\n"
                 b"Content-Type: text/plain\r\n"
                 b"\r\n")
        await self._loop.sock_sendall(self._sock, reply)
        await self.close()
        return

    # Advertise the right container flavour for the monitor's content.
    reply = b"HTTP/1.1 200 OK\r\n"
    if monitor.has_video:
        reply += b"Content-Type: video/x-matroska\r\n\r\n"
    else:
        reply += b"Content-Type: audio/x-matroska\r\n\r\n"
    await self._loop.sock_sendall(self._sock, reply)

    if parser.get_method() == "HEAD":
        await self.close()
        return

    # Hand our socket over to the monitor for streaming.
    monitor.add_fd(self._sock.fileno())
def process_new_conn(self,sockfd): data = '' sockfd.settimeout(2) while True: try: data2 = sockfd.recv(1024) print data2 if not data2: break data = data + data2 except socket.timeout as e: break p = HttpParser() #data = str.replace(data, '\n', '\r\n') recved = len(data) nparsed = p.execute(data, recved) #assert nparsed == recved #import pdb #pdb.set_trace() assert nparsed == recved #get_head = p.get_headers() get_path = p.get_path() #print get_path if(get_path == '/status'): print "---------------Receiving data from DDOS Detector for a response------------------" self.process_status_req(sockfd) print "--------------------------------------------------------------" else: print "---------------Receiving data from DDOS Detector to replay it in a honeypot------------------" self.process_replay_req(sockfd,p) print "--------------------------------------------------------------"
class HTTPProtocol(FlowControlMixin, asyncio.Protocol):
    """asyncio protocol bridging an HttpParser to a request callback.

    The callback receives a request dict, a StreamReader carrying the
    request body, and a StreamWriter for the response, and must return
    ``(status, headers, body_iterable)``.
    """

    def __init__(self, stream_reader, callback, loop=None):
        super().__init__(loop=loop)
        self._stream_reader = stream_reader
        self._stream_writer = None
        self._callback = callback  # coroutine invoked once per request
        self._task = None          # dispatch Task, created when headers complete
        self._server = None        # local socket name, filled in connection_made

    def connection_made(self, transport):
        self._parser = HttpParser()
        self._stream_reader.set_transport(transport)
        self._stream_writer = asyncio.StreamWriter(
            transport,
            self,
            self._stream_reader,
            self._loop,
        )

        # Grab the name of our socket if we have it
        self._server = transport.get_extra_info("sockname")

    def connection_lost(self, exc):
        # Propagate either a clean EOF or the error to the body reader.
        if exc is None:
            self._stream_reader.feed_eof()
        else:
            self._stream_reader.set_exception(exc)

        super().connection_lost(exc)

    def data_received(self, data):
        # Parse our incoming data with our HTTP parser
        self._parser.execute(data, len(data))

        # If we have not already handled the headers and we've gotten all of
        # them, then invoke the callback with the headers in them.
        if self._task is None and self._parser.is_headers_complete():
            coro = self.dispatch(
                {
                    "server": self._server,
                    "protocol": b"HTTP/" + b".".join(
                        str(x).encode("ascii")
                        for x in self._parser.get_version()
                    ),
                    "method": self._parser.get_method().encode("latin1"),
                    "path": self._parser.get_path().encode("latin1"),
                    "query": self._parser.get_query_string().encode("latin1"),
                    "headers": self._parser.get_headers(),
                },
                self._stream_reader,
                self._stream_writer,
            )
            self._task = asyncio.Task(coro, loop=self._loop)

        # Determine if we have any data in the body buffer and if so feed it
        # to our StreamReader
        if self._parser.is_partial_body():
            self._stream_reader.feed_data(self._parser.recv_body())

        # Determine if we've completed the end of the HTTP request, if we have
        # then we should close our stream reader because there is nothing more
        # to read.
        if self._parser.is_message_complete():
            self._stream_reader.feed_eof()

    def eof_received(self):
        # We've gotten an EOF from the client, so we'll propagate this to our
        # StreamReader
        self._stream_reader.feed_eof()

    @asyncio.coroutine
    def dispatch(self, request, request_body, response):
        # Get the status, headers, and body from the callback. The body must
        # be iterable, and each item can either be a bytes object, or an
        # asyncio coroutine, in which case we'll ``yield from`` on it to wait
        # for it's value.
        status, resp_headers, body = yield from self._callback(
            request,
            request_body,
        )

        # Write out the status line to the client for this request
        # TODO: We probably don't want to hard code HTTP/1.1 here
        response.write(b"HTTP/1.1 " + status + b"\r\n")

        # Write out the headers, taking special care to ensure that any
        # mandatory headers are added.
        # TODO: We need to handle some required headers
        for key, values in resp_headers.items():
            # In order to handle headers which need to have multiple values
            # like Set-Cookie, we allow the value of the header to be an
            # iterable instead of a bytes object, in which case we'll write
            # multiple header lines for this header.
            if isinstance(values, (bytes, bytearray)):
                values = [values]

            for value in values:
                response.write(key + b": " + value + b"\r\n")

        # Before we get to the body, we need to write a blank line to separate
        # the headers and the response body
        response.write(b"\r\n")

        for chunk in body:
            # If the chunk is a coroutine, then we want to wait for the result
            # before we write it.
            if asyncio.iscoroutine(chunk):
                chunk = yield from chunk

            # Write our chunk out to the connect client
            response.write(chunk)

        # We've written everything in our iterator, so we want to close the
        # connection.
        response.close()
# Minimal single-threaded HTTP server loop: accept a connection, feed
# the incoming bytes to HttpParser until a full message has arrived,
# then dispatch the parsed request to process_request().
while True:
    (clientsocket, address) = serversocket.accept()
    parser = HttpParser()
    body = []
    while True:
        data = clientsocket.recv(1024)
        if not data:
            break
        recved = len(data)
        nparsed = parser.execute(data, recved)
        # NOTE(review): assert used as protocol validation -- it is
        # stripped when running with -O; confirm this is acceptable.
        assert nparsed == recved
        if parser.is_headers_complete():
            print parser.get_method()
            print parser.get_path()
        if parser.is_partial_body():
            body.append(parser.recv_body())
        if parser.is_message_complete():
            break
    print ''.join(body)
    # The body is assumed to be an urlencoded form; pairs without '='
    # are silently dropped.
    result = process_request(parser.get_method(), parser.get_path(),
        dict([x.split('=') for x in ''.join(body).split('&')
              if len(x.split('=')) == 2]))
    result += '\n'
    # NOTE(review): `response` is not defined in this view -- presumably
    # a module-level status-line/header prefix; verify.
    clientsocket.send(response + str(result))
    print result
class HttpStream(object):
    """ An HTTP parser providing higher-level access to a readable,
    sequential io.RawIOBase object. You can use implementions of
    http_parser.reader (IterReader, StringReader, SocketReader) or
    create your own. """

    def __init__(self, stream, kind=HTTP_BOTH, decompress=False):
        """ constructor of HttpStream.

        :attr stream: an io.RawIOBase object
        :attr kind: Int, could be 0 to parse only requests,
        1 to parse only responses or 2 if we want to let
        the parser detect the type.
        """
        self.parser = HttpParser(kind=kind, decompress=decompress)
        self.stream = stream

    def _check_headers_complete(self):
        # Pull data from the underlying stream until the parser has seen
        # the complete header section (or the stream is exhausted).
        if self.parser.is_headers_complete():
            return

        while True:
            try:
                data = self.next()
            except StopIteration:
                # Stream ended: only acceptable if headers did complete.
                if self.parser.is_headers_complete():
                    return
                raise NoMoreData()

            if self.parser.is_headers_complete():
                return

    def url(self):
        """ get full url of the request """
        self._check_headers_complete()
        return self.parser.get_url()

    def path(self):
        """ get path of the request (url without query string and
        fragment """
        self._check_headers_complete()
        return self.parser.get_path()

    def query_string(self):
        """ get query string of the url """
        self._check_headers_complete()
        return self.parser.get_query_string()

    def fragment(self):
        """ get fragment of the url """
        self._check_headers_complete()
        return self.parser.get_fragment()

    def version(self):
        # (major, minor) HTTP version tuple.
        self._check_headers_complete()
        return self.parser.get_version()

    def status_code(self):
        """ get status code of a response as integer """
        self._check_headers_complete()
        return self.parser.get_status_code()

    def status(self):
        """ return complete status with reason """
        status_code = self.status_code()
        reason = status_reasons.get(int(status_code), 'unknown')
        return "%s %s" % (status_code, reason)

    def method(self):
        """ get HTTP method as string"""
        self._check_headers_complete()
        return self.parser.get_method()

    def headers(self):
        """ get request/response headers; headers are returned in an
        OrderedDict that allows you to get values using
        case-insensitive keys."""
        self._check_headers_complete()
        return self.parser.get_headers()

    def should_keep_alive(self):
        """ return True if the connection should be kept alive
        """
        self._check_headers_complete()
        return self.parser.should_keep_alive()

    def is_chunked(self):
        """ return True if Transfer-Encoding header value is chunked"""
        self._check_headers_complete()
        return self.parser.is_chunked()

    def wsgi_environ(self, initial=None):
        """ get WSGI environ based on the current request.

        :attr initial: dict, initial values to fill in environ.
        """
        # NOTE(review): the `initial` argument is accepted but not
        # forwarded to get_wsgi_environ -- confirm whether it should be.
        self._check_headers_complete()
        return self.parser.get_wsgi_environ()

    def body_file(self, buffering=None, binary=True, encoding=None,
            errors=None, newline=None):
        """ return the body as a buffered stream object. If binary is
        true an io.BufferedReader will be returned, else an
        io.TextIOWrapper.
        """
        self._check_headers_complete()

        if buffering is None:
            buffering = -1
        if buffering < 0:
            buffering = DEFAULT_BUFFER_SIZE

        raw = HttpBodyReader(self)
        buffer = BufferedReader(raw, buffering)
        if binary:
            return buffer
        text = TextIOWrapper(buffer, encoding, errors, newline)
        return text

    def body_string(self, binary=True, encoding=None, errors=None,
            newline=None):
        """ return body as string """
        # NOTE(review): `errors` is accepted but not forwarded to
        # body_file -- confirm intended.
        return self.body_file(binary=binary, encoding=encoding,
                newline=newline).read()

    def __iter__(self):
        return self

    def next(self):
        # Read one buffer's worth of data from the stream and feed it to
        # the parser; raises StopIteration when the message is complete.
        if self.parser.is_message_complete():
            raise StopIteration

        # fetch data
        b = bytearray(DEFAULT_BUFFER_SIZE)
        recved = self.stream.readinto(b)
        if recved is None:
            raise NoMoreData("no more data")
        del b[recved:]

        # parse data
        nparsed = self.parser.execute(bytes(b), recved)
        if nparsed != recved and not self.parser.is_message_complete():
            raise ParserError("nparsed != recved")

        if recved == 0:
            raise StopIteration

        return bytes(b)
def run(self):
    """Consume HTTP client sockets from the internal queue, read and
    parse each request, and publish its JSON body onto the matching
    Redis service queue.  Unknown services are answered with error
    code 40004 and the socket is closed."""
    queuename = "A:Queue:httpproxy"
    if self._config is not None and 'httpproxy' in self._config and self._config['httpproxy'] is not None:
        if 'Consumer_Queue_Name' in self._config['httpproxy'] and self._config['httpproxy']['Consumer_Queue_Name'] is not None:
            queuename = self._config['httpproxy']['Consumer_Queue_Name']
    # Per-process reply queue name, used as the "from" address below.
    selfqueuename = "%s:%s" % (queuename, os.getpid())
    logger.debug("PublishThread::run : %s" % (selfqueuename))
    # Directory listing defines the set of known services.
    servicelist = os.listdir('./apps')
    while True:
        try:
            sockobj = self._httpclientsocketqueue.get()
            request_path = ""
            body = []
            p = HttpParser()
            seqid = uuid.uuid1()
            requestdict = dict()
            requestdict['sock'] = sockobj
            # requestdatetime = time.strftime(
            #     '%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
            requestdict['requestdatetime'] = time.time()
            # Remember the socket so the response thread can answer later.
            self._response_socket_dict[seqid.__str__()] = requestdict
            # logger.debug("responsesocketdict len = %d", len(self._response_socket_dict))
            while True:
                request = sockobj.recv(self._recvbuflen)
                # logger.warning("request : %s" % (request))
                recved = len(request)
                # logger.warning("recved : %d" % (recved))
                if(recved == 0):
                    logger.warning("socket is closed by peer")
                    sockobj.close()
                    break
                nparsed = p.execute(request, recved)
                # logger.warning("nparsed : %d" % (nparsed))
                if nparsed != recved:
                    logger.warning("parse error")
                    sockobj.close()
                    break
                # if p.is_headers_complete():
                #     request_headers = p.get_headers()
                #     for key in request_headers:
                #         logger.debug("%s: %s" % (key, request_headers[key]))
                #     logger.warning("headers complete")
                if p.is_partial_body():
                    body.append(p.recv_body())
                    # logger.warning("body : %s" % (body))
                if p.is_message_complete():
                    # logger.warning("message complete")
                    break
            content = "".join(body)
            routekey = ""
            servicepath = ""
            # A path of the form /xxx is treated as a route key; a path
            # of the form /xxx/yyy/zzz is treated as a destination service.
            request_path = p.get_path()[1:]
            # logger.warning('PublishThread request_path (%s), is routekey (%d)' % (request_path, request_path.find('/')))
            # logger.debug("content : %s" % (content))
            if request_path.find('/') == -1 and len(request_path) and request_path in servicelist:
                routekey = "A:Queue:%s" % request_path
                if request_path in self._config:
                    routekey = self._config[request_path]['Consumer_Queue_Name']
                if len(content) == 0:
                    content_json = dict()
                else:
                    content_json = json.loads(content)
                # Tag the message so the consumer can route the reply back.
                content_json['sockid'] = seqid.__str__()
                content_json['from'] = selfqueuename
                self._redis.lpush(routekey, json.dumps(content_json))
            else:
                # Unknown service: answer with an error code and drop
                # the socket from the pending-response map.
                ret = dict()
                ret['error_code'] = '40004'
                sockobj.sendall('HTTP/1.1 200 OK\n\n%s' % (json.dumps(ret)))
                # sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()
                self._response_socket_dict.pop(seqid.__str__())
                continue
            # sockobj.sendall('HTTP/1.1 200 OK\n\nWelcome %s' % (
            #     seqid))
            # sockobj.close()
        except Exception as e:
            logger.error("PublishThread %s except raised : %s " % (
                e.__class__, e.args))
def handle_read(self):
    """Read from the socket and, once a complete HTTP request has been
    buffered, dispatch it to the registered handler and queue the
    response for writing."""
    try:
        logging.debug('Connection.handle_read - id=%d' % self.id)
        b = self.sock.recv(2048)
        logging.debug('Connection.handle_read - received buffer size is %d bytes' % len(b))
        logging.debug('Connection.handle_read - received buffer is : \n%s' % b)
        if not len(b):
            # Peer closed the connection.
            logging.debug('Connection.handle_read - 0 bytes received on %d. closing' % self.id)
            self.close()
            return
        self.read_buf += b
    except socket.error as err:
        # BUG FIX: the original referenced an undefined name `args`
        # (NameError); the (errno, message) tuple lives on the caught
        # exception as err.args.
        if err.args[0] not in NONBLOCKING:
            self.handle_error('%s' % err.args[1])
        else:
            logging.error('Connection.handle_read - NONBLOCKING event on read : %s' % err.args[1])
    else:
        # check if we have a full http request
        parser = HttpParser()
        recved = len(self.read_buf)
        nparsed = parser.execute(self.read_buf, recved)
        assert nparsed == recved
        if not parser.is_message_complete():
            # we got a partial request keep on reading
            logging.debug(
                'Connection.handle_read - partial buffer received : \n%s' % self.read_buf)
            self.reset(pyev.EV_READ)
        else:
            # we got a full request: match the verb with the URI, call
            # the handler, then register for write to send the response
            self.read_buf = ''
            verb = parser.get_method()
            url = parser.get_path()
            logging.debug('Connection.handle_read - id %d - method is %s and url %s'
                          % (self.id, verb, url))
            call, keyword_args = register.get_callable(url, verb)
            if not call:
                err = HttpResponse()
                err.status_code = 404
                err.status_string = 'Not Found'
                err.headers['Content-Type'] = 'application/txt'
                err.body = 'URI Not Found\r\n'
                self.write_buf = err.to_string()
            else:
                keyword_args['http_request'] = parser
                logging.debug('Connection.handle_read - kargs=%s' % keyword_args)
                try:
                    response = call(*[register.handler, ], **keyword_args)
                    self.write_buf = response.to_string()
                except Exception:
                    # Handler blew up: answer 500 instead of crashing the
                    # event loop (narrowed from a bare except).
                    err = HttpResponse()
                    err.status_code = 500
                    err.status_string = 'Internal Server Error'
                    err.headers['Content-Type'] = 'application/txt'
                    err.body = 'Upsssss.\r\n'
                    self.write_buf = err.to_string()
            logging.debug('Connection.handle_read - requesting write %d' % self.id)
            self.reset(pyev.EV_WRITE)
class PupyHTTPWrapperServer(BasePupyTransport):
    """Transport wrapper that sniffs the first bytes of a connection and
    either serves plain HTTP (static files from wwwroot) or releases the
    stream to the real tunnelled transport."""

    path = '/index.php?d='
    # BUG FIX: ('GET') is just the string 'GET', so the membership test in
    # _handle_http was a substring check ('E' in 'GET' is True).  A real
    # one-element tuple restores exact-match semantics.
    allowed_methods = ('GET',)
    server = None
    headers = {
        'Content-Type': 'text/html; charset=utf-8',
        'Server': 'Apache',
        'Connection': 'close',
    }

    __slots__ = ('parser', 'is_http', 'body', 'downstream_buffer',
                 'well_known', 'omit', 'probe_len')

    def __init__(self, *args, **kwargs):
        super(PupyHTTPWrapperServer, self).__init__(*args, **kwargs)

        self.parser = HttpParser()
        self.is_http = None  # None = undecided, then True/False per connection
        self.body = []
        self.downstream_buffer = Buffer()
        self.well_known = ('GET', 'POST', 'OPTIONS', 'HEAD', 'PUT', 'DELETE')
        # Method+path prefixes that belong to the tunnelled protocol and
        # must NOT be treated as plain web traffic.
        self.omit = tuple(
            '{} {}'.format(x, y) for x in self.well_known
            for y in (self.path, '/ws/', 'ws/')
        )
        self.probe_len = max(len(x) for x in self.omit)

    def _http_response(self, code, status, headers=None, datasize=None,
                       content=None):
        """Send an HTTP response header block downstream.

        BUG FIX: the original immediately shadowed the *headers* argument
        with a fresh dict and then merged that dict into itself, so
        caller-supplied headers were silently dropped.  Extra headers are
        now merged on top of the class defaults.
        """
        response_headers = {}
        response_headers.update(self.headers)
        if headers:
            response_headers.update(headers)

        if datasize:
            response_headers.update({
                'Content-Length': datasize,
                'Content-Type': 'application/octet-stream',
            })

        data = '\r\n'.join([
            'HTTP/1.1 {} {}'.format(code, status),
            '\r\n'.join([
                '{}: {}'.format(key, value)
                for key, value in response_headers.iteritems()
            ])
        ]) + '\r\n\r\n'

        self.downstream.write(data)

    def _handle_file(self, filepath):
        # Stream the file downstream after a 200 header carrying its size.
        try:
            with open(filepath) as infile:
                size = stat(filepath).st_size
                self._http_response(200, 'OK', datasize=size)
                while True:
                    data = infile.read(65535)
                    if data:
                        self.downstream.write(data)
                    else:
                        break
        except Exception:
            # BUG FIX: the original passed 'Not found' as the (broken)
            # headers argument; it was never used.
            self._http_response(404, 'Not found')

    def _handle_not_found(self):
        self._http_response(404, 'Not found')

    def _handle_http(self, data):
        """Parse a plain-HTTP request and serve it from wwwroot."""
        self.parser.execute(data, len(data))

        if self.parser.is_headers_complete():
            try:
                if not self.parser.get_method() in self.allowed_methods:
                    self._http_response(405, 'Method Not Allowed')
                else:
                    urlpath = self.parser.get_path()
                    # Split the path, dropping empty and dot segments to
                    # prevent directory traversal.
                    urlpath = [
                        x.strip() for x in urlpath.split('/')
                        if (x and not str(x) in ('.', '..'))
                    ]

                    root = self.server.config.get_folder('wwwroot')
                    secret = self.server.config.getboolean('httpd', 'secret')
                    log = self.server.config.getboolean('httpd', 'log')

                    if secret:
                        # The first path segment must match the secret.
                        wwwsecret = self.server.config.get(
                            'randoms', 'wwwsecret', random=5)
                        if not (urlpath and urlpath[0] == wwwsecret):
                            self._handle_not_found()
                            if log:
                                self.server.info(
                                    '{}: GET {} | SECRET = {}'.format(
                                        '{}:{}'.format(
                                            *self.downstream.transport.peer[:2]),
                                        urlpath, wwwsecret),
                                    error=True)
                            return

                        urlpath = urlpath[1:]

                    # Translate randomized names back to real file names.
                    urlpath = path.sep.join([
                        self.server.config.get('randoms', x, new=False) or x
                        for x in urlpath
                    ])

                    if not urlpath:
                        urlpath = 'index.html'

                    filepath = path.join(root, urlpath)
                    if path.exists(filepath):
                        self._handle_file(filepath)
                        if log:
                            message = urlpath
                            if filepath in self.server.served_content:
                                message = message + ' <' + \
                                    self.server.served_content[filepath] + '>'
                            self.server.info('{}: GET /{}'.format(
                                '{}:{}'.format(
                                    *self.downstream.transport.peer[:2]),
                                message))
                    else:
                        self._handle_not_found()
                        if log:
                            self.server.info('{}: GET {}'.format(
                                '{}:{}'.format(
                                    *self.downstream.transport.peer[:2]),
                                urlpath), error=True)
            finally:
                self.close()

    def downstream_recv(self, data):
        header = data.peek(self.probe_len)

        if __debug__:
            logger.debug('Recv: len=%d // header = %s', len(data), header)

        if self.server and self.is_http is None:
            # Decide once per connection: looks like HTTP and is not one
            # of the tunnelled prefixes.
            self.is_http = header.startswith(self.well_known) and \
                not header.startswith(self.omit)

            if __debug__:
                logger.debug('Http: %s', self.is_http)

        if self.is_http:
            self._handle_http(data.read())
        else:
            if __debug__:
                logger.debug('Write to upstream: len=%d, handler=%s',
                             len(data), self.upstream.on_write_f)

            data.write_to(self.upstream)

            if self.downstream_buffer:
                if __debug__:
                    logger.debug(
                        'Flush buffer to downstream: len=%d, handler=%s',
                        len(self.downstream_buffer),
                        self.downstream.on_write_f)

                self.downstream_buffer.write_to(self.downstream)

            if __debug__:
                logger.debug('Release transport')

            # Hand the connection over to the real transport chain.
            raise ReleaseChainedTransport()

    def upstream_recv(self, data):
        if __debug__:
            logger.debug('Send intent: len=%d', len(data))

        if self.is_http is None:
            # Still undecided: park outgoing data until we know.
            data.write_to(self.downstream_buffer)
            if __debug__:
                logger.debug('HTTP? Append to pending buffer: total len=%d',
                             len(self.downstream_buffer))
        elif not self.is_http:
            if __debug__:
                logger.debug('Non-HTTP: Direct pass (handler=%s)',
                             self.downstream.on_write_f)

            if self.downstream_buffer:
                self.downstream_buffer.write_to(self.downstream)

            data.write_to(self.downstream)
        else:
            # Plain HTTP connections never see tunnelled payload.
            if __debug__:
                logger.debug('HTTP: Omit data')
            pass
def handle_read(self):
    """Read pending bytes from the client socket; once a complete HTTP
    request is buffered, dispatch it through the URL register and queue
    the response for writing.

    Partial requests re-arm the read watcher; a zero-byte read means the
    peer closed and the connection is torn down.
    """
    try:
        logging.debug('Connection.handle_read - id=%d' % self.id)
        b = self.sock.recv(2048)
        logging.debug(
            'Connection.handle_read - received buffer size is %d bytes' % len(b))
        logging.debug(
            'Connection.handle_read - received buffer is : \n%s' % b)
        if not len(b):
            # Zero bytes received: the peer closed the connection.
            logging.debug(
                'Connection.handle_read - 0 bytes received on %d. closing' % self.id)
            self.close()
            return
        self.read_buf += b
    except socket.error as err:
        # Bug fix: both branches formatted the undefined name ``args``
        # (NameError at runtime); use the caught exception's args.
        if err.args[0] not in NONBLOCKING:
            self.handle_error('%s' % err.args[1])
        else:
            logging.error(
                'Connection.handle_read - NONBLOCKING event on read : %s' % err.args[1])
    else:
        # check if we have a full http request buffered
        parser = HttpParser()
        recved = len(self.read_buf)
        nparsed = parser.execute(self.read_buf, recved)
        assert nparsed == recved
        if not parser.is_message_complete():
            # we got a partial request: keep on reading
            logging.debug(
                'Connection.handle_read - partial buffer received : \n%s' % self.read_buf)
            self.reset(pyev.EV_READ)
        else:
            # we got a full request
            self.read_buf = ''
            # match the verb with the URI, call the handler, then
            # register for write to send the response
            verb = parser.get_method()
            url = parser.get_path()
            logging.debug(
                'Connection.handle_read - id %d - method is %s and url %s' % (
                    self.id, verb, url))
            call, keyword_args = register.get_callable(url, verb)
            if not call:
                err = HttpResponse()
                err.status_code = 404
                err.status_string = 'Not Found'
                err.headers['Content-Type'] = 'application/txt'
                err.body = 'URI Not Found\r\n'
                self.write_buf = err.to_string()
            else:
                keyword_args['http_request'] = parser
                logging.debug('Connection.handle_read - kargs=%s' % keyword_args)
                try:
                    response = call(*[register.handler, ], **keyword_args)
                    self.write_buf = response.to_string()
                except Exception:
                    # Handler blew up: answer with a 500 instead of
                    # killing the connection.  (Was a bare ``except:``,
                    # which also swallowed SystemExit/KeyboardInterrupt.)
                    err = HttpResponse()
                    err.status_code = 500
                    err.status_string = 'Internal Server Error'
                    err.headers['Content-Type'] = 'application/txt'
                    err.body = 'Upsssss.\r\n'
                    self.write_buf = err.to_string()
            logging.debug('Connection.handle_read - requesting write %d' % self.id)
            self.reset(pyev.EV_WRITE)
def parseRequest(self, data): if (self.debug): print "entering parse Request--------\n" # Create a parser object try: from http_parser.parser import HttpParser except ImportError: from http_parser.pyparser import HttpParser parser = HttpParser() nparser = parser.execute(data, len(data)) response = None path = None host = None isRangeReq = None isHeadReq = False # Get Protocol Version version = "HTTP/1.1" #check if request is valid method = parser.get_method() if method not in self.validMethods: if (self.debug): print "received a non valid method: %s\n" %method response = self.createError("400", "Bad Request") elif method != "GET" and method != "HEAD": if (self.debug): print "received a method which we do not implement\n" response = self.createError("501", "Not Implemented") else: if method == "HEAD": isHeadReq = True url = parser.get_path() # Check for url errors if (url == ""): if self.debug: print "url is empty\n" resposne = self.createError("400", "Bad Request") elif (url == "/"): url = "/index.html" headers = parser.get_headers() if "Range" in headers: isRangeReq = headers["Range"] if self.debug: print "Range Request = %s" %isRangeReq #get Host if "Host" in headers: host = headers["Host"].split(':')[0] if (self.debug): print "host is: %s\n"%host # Handle errors in host if host not in self.hosts: if 'default' not in self.hosts: if self.debug: print " not host or default\n" response = self.createError("400", "Bad Request") else: # Use the default host if self.debug: print "using default host\n" path = self.hosts['default'] if (self.debug): print "path is: %s\n"%path path += url if (self.debug): print "full path is: %s\n"%path response = self.createResponse(path, isRangeReq) else: #use given host path = self.hosts[host] if (self.debug): print "path is: %s\n"%path path += url if (self.debug): print "full path is: %s\n"%path response = self.createResponse(path, isRangeReq) if isHeadReq: path = None if self.debug: print "end of parse request\n" return response, path, 
isRangeReq
def recvrawsocket2(sockobj, address):
    """Receive one HTTP request on a raw socket and route it to a backend
    service queue in redis.

    The socket is registered in the global ``responsesocketdict`` keyed by
    a fresh uuid, so another component can deliver the response later.
    GET requests of the form ``/service/action?...`` are converted to an
    action dict; other methods take the JSON request body as the payload.
    The payload is LPUSHed onto the service's redis queue; unknown
    services get an immediate 40004 error response.
    """
    try:
        # if 1:
        # logger.error(sockobj)
        # logger.debug(dir(sockobj))
        request_path = ""
        body = []
        p = HttpParser()
        seqid = uuid.uuid1()
        # Park the socket in the global response table so the async
        # responder can find it by seqid.
        requestdict = dict()
        requestdict['sock'] = sockobj
        # requestdatetime = time.strftime('%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
        requestdatetime = time.time()
        requestdict['requestdatetime'] = requestdatetime
        responsesocketdict[seqid.__str__()] = requestdict
        # logger.debug("responsesocketdict len = %d", len(responsesocketdict))

        # Read until the parser has a complete message (or the peer
        # closes / the bytes fail to parse).
        while True:
            request = sockobj.recv(recv_buf_len)
            # logger.warning("request : %s" % (request))
            recved = len(request)
            # logger.warning("recved : %d" % (recved))
            if (recved == 0):
                logger.warning("socket is closed by peer %r" % (sockobj))
                sockobj.close()
                break
            nparsed = p.execute(request, recved)
            # logger.warning("nparsed : %d" % (nparsed))
            if nparsed != recved:
                logger.warning("parse error")
                sockobj.close()
                break
            if p.is_headers_complete():
                request_headers = p.get_headers()
                # for key in request_headers:
                # logger.debug("headers complete %s" % (request_headers.__str__()))
                # logger.warning("headers complete")
            if p.is_partial_body():
                body.append(p.recv_body())
                # logger.warning("body : %s" % (body))
            if p.is_message_complete():
                # logger.warning("message complete")
                break

        # logger.debug(p.get_method())
        # logger.debug(p.get_path())
        # logger.debug(p.get_query_string())
        routekey = ""
        servicepath = ""
        # A path like /xxx is treated as a route key; /xxx/yyy/zzz is
        # treated as a destination service.
        request_path = p.get_path()[1:]
        request_pathlist = request_path.split('/')
        servicename = request_pathlist[0]
        action_name = ''
        servicelist = os.listdir('./apps')
        content = dict()
        if p.get_method() == 'GET':
            # Diagnostic endpoint: echo the socket back to the caller.
            if servicename == 'showip':
                sockobj.sendall("HTTP/1.1 200 OK \n\n%s" % (sockobj))
                sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()
                return
            # GET requests must look like /service/action
            if len(request_pathlist) != 2:
                ret = dict()
                ret['errcode'] = '40004'
                ret['errmsg'] = _errmsg['40004']
                sockobj.sendall('HTTP/1.1 500 OK\n\n%s' % (json.dumps(ret)))
                sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()
                return
            # Build the action payload from the query string.
            action_name = request_pathlist[1]
            querystring = p.get_query_string()
            querylist = querystring.split('&')
            action_body = dict()
            for query in querylist:
                kvlist = query.split('=')
                action_body[kvlist[0]] = ''.join(kvlist[1:])
            content['action_cmd'] = action_name
            content['seq_id'] = str(random.randint(10000, 1000000))
            content['body'] = action_body
            content['version'] = '1.0'
        else:
            # Non-GET: the JSON request body is the payload.
            if len(body) > 0:
                content = json.loads("".join(body))
                # content = "".join(body)
        # logger.debug("servicename=%s,action_name=%s"%(servicename,action_name))
        # logger.debug("content=%r"%(content))
        # Echo endpoint used for platform URL verification.
        if servicename == 'testurl':
            sockobj.sendall('HTTP/1.1 200 OK\n\n%s' % (content['body']['signature']))
            sockobj.shutdown(socket.SHUT_WR)
            sockobj.close()
            return
        if servicename in servicelist:
            # Route to the service queue; config may override the name.
            routekey = "A:Queue:%s" % servicename
            if servicename in _config:
                routekey = _config[servicename]['Consumer_Queue_Name']
            content['sockid'] = seqid.__str__()
            # NOTE(review): ``selfqueuename`` is not defined in this
            # function — presumably a module-level global; verify.
            content['from'] = selfqueuename
            _redis.lpush(routekey, json.dumps(content))
            # No response here: the responder answers asynchronously via
            # responsesocketdict.
        else:
            ret = dict()
            ret['errcode'] = '40004'
            ret['errmsg'] = _errmsg['40004']
            sockobj.sendall('HTTP/1.1 404 OK\n\n%s' % (json.dumps(ret)))
            sockobj.shutdown(socket.SHUT_WR)
            sockobj.close()
        # requestdict = dict()
        # requestdict['sock'] = sockobj
        # requestdatetime = time.strftime(
        # '%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
        # requestdict['requestdatetime'] = requestdatetime
        # responsesocketdict[seqid.__str__()] = requestdict
        # sockobj.sendall('HTTP/1.1 200 OK\n\nWelcome %s' % (
        # seqid))
        # sockobj.close()
    except Exception as e:
        logger.error("recvrawsocket2 %s except raised : %s " %
                     (e.__class__, e.args))
def handle_url(self, request: HttpParser) -> HttpResponse:
    """Fallback URL handler: answer 501 with the request line echoed back."""
    method = request.get_method()
    path = request.get_path()
    return HttpResponse(501, '%s %s' % (method, path))
class HttpStream(object):
    """ An HTTP parser providing higher-level access to a readable,
    sequential io.RawIOBase object. You can use implementions of
    http_parser.reader (IterReader, StringReader, SocketReader) or
    create your own. """

    def __init__(self, stream, kind=HTTP_BOTH, decompress=False):
        """ constructor of HttpStream.

        :attr stream: an io.RawIOBase object
        :attr kind: Int, could be 0 to parse only requests,
        1 to parse only responses or 2 if we want to let
        the parser detect the type.
        """
        self.parser = HttpParser(kind=kind, decompress=decompress)
        self.stream = stream

    def _check_headers_complete(self):
        """ pull data from the stream until the full header block has
        been parsed; raise NoMoreData if the stream ends first. """
        if self.parser.is_headers_complete():
            return

        while True:
            try:
                next(self)
            except StopIteration:
                if self.parser.is_headers_complete():
                    return
                raise NoMoreData("Can't parse headers")

            if self.parser.is_headers_complete():
                return

    def _wait_status_line(self, cond):
        """ feed the parser until ``cond()`` is truthy (or headers are
        complete); raise BadStatusLine with the bytes read so far if the
        stream ends before the start line parses. """
        if self.parser.is_headers_complete():
            return True

        data = []
        if not cond():
            while True:
                try:
                    d = next(self)
                    data.append(d)
                except StopIteration:
                    if self.parser.is_headers_complete():
                        return True
                    raise BadStatusLine(b"".join(data))
                if cond():
                    return True
        return True

    def _wait_on_url(self):
        return self._wait_status_line(self.parser.get_url)

    def _wait_on_status(self):
        return self._wait_status_line(self.parser.get_status_code)

    def url(self):
        """ get full url of the request """
        self._wait_on_url()
        return self.parser.get_url()

    def path(self):
        """ get path of the request (url without query string and
        fragment) """
        self._wait_on_url()
        return self.parser.get_path()

    def query_string(self):
        """ get query string of the url """
        self._wait_on_url()
        return self.parser.get_query_string()

    def fragment(self):
        """ get fragment of the url """
        self._wait_on_url()
        return self.parser.get_fragment()

    def version(self):
        """ get the HTTP version of the parsed message """
        self._wait_on_status()
        return self.parser.get_version()

    def status_code(self):
        """ get status code of a response as integer """
        self._wait_on_status()
        return self.parser.get_status_code()

    def status(self):
        """ return complete status with reason """
        status_code = self.status_code()
        reason = status_reasons.get(int(status_code), 'unknown')
        return "%s %s" % (status_code, reason)

    def method(self):
        """ get HTTP method as string"""
        self._wait_on_status()
        return self.parser.get_method()

    def headers(self):
        """ get request/response headers, headers are returned in a
        OrderedDict that allows you to get value using insensitive
        keys."""
        self._check_headers_complete()
        return self.parser.get_headers()

    def should_keep_alive(self):
        """ return True if the connection should be kept alive """
        self._check_headers_complete()
        return self.parser.should_keep_alive()

    def is_chunked(self):
        """ return True if Transfer-Encoding header value is chunked"""
        self._check_headers_complete()
        return self.parser.is_chunked()

    def wsgi_environ(self, initial=None):
        """ get WSGI environ based on the current request.

        :attr initial: dict, initial values to fill in environ. """
        self._check_headers_complete()
        # Bug fix: ``initial`` used to be silently ignored.  Seed the
        # environ with it, then let the parsed request values win.
        environ = dict(initial) if initial else {}
        environ.update(self.parser.get_wsgi_environ())
        return environ

    def body_file(self, buffering=None, binary=True, encoding=None,
                  errors=None, newline=None):
        """ return the body as a buffered stream object. If binary is
        true an io.BufferedReader will be returned, else an
        io.TextIOWrapper.
        """
        self._check_headers_complete()

        if buffering is None:
            buffering = -1
        if buffering < 0:
            buffering = DEFAULT_BUFFER_SIZE

        raw = HttpBodyReader(self)
        buf = BufferedReader(raw, buffering)
        if binary:
            return buf
        text = TextIOWrapper(buf, encoding, errors, newline)
        return text

    def body_string(self, binary=True, encoding=None, errors=None,
                    newline=None):
        """ return body as string """
        # Bug fix: ``errors`` was dropped instead of being forwarded,
        # so the decode error-handling policy never took effect.
        return self.body_file(binary=binary, encoding=encoding,
                              errors=errors, newline=newline).read()

    def __iter__(self):
        return self

    def __next__(self):
        if self.parser.is_message_complete():
            raise StopIteration

        # fetch data
        b = bytearray(DEFAULT_BUFFER_SIZE)
        recved = self.stream.readinto(b)
        if recved is None:
            raise NoMoreData("no more data")
        del b[recved:]
        to_parse = bytes(b)

        # parse data
        nparsed = self.parser.execute(to_parse, recved)
        if nparsed != recved and not self.parser.is_message_complete():
            raise ParserError("nparsed != recved (%s != %s) [%s]" % (
                nparsed, recved, bytes_to_str(to_parse)))

        if recved == 0:
            raise StopIteration

        return to_parse

    next = __next__
def handleRequest(self,data):
    """Process the request made by the client and return the response.

    Only GET is supported (anything else gets a 501); missing files get
    a 404, unreadable files a 403, and I/O failures a 500.
    """
    Debug.dprint("POLLER::handleRequest()")
    # Constant response headers for every reply.
    self.headers = {}
    self.headers['Server'] = "SimpleHTTP/0.6 Python/2.7.9"
    self.headers['Date'] = strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())
    #self.headers['Connection'] = "close"
    p = HttpParser()
    p.execute(data, len(data))
    method = p.get_method()
    path = p.get_path()
    headers = p.get_headers()
    Debug.dprint("POLLER::handleClient()::get_method= " + str(p.get_method()))
    Debug.dprint("POLLER::handleClient()::get_path= " + str(p.get_path()))
    Debug.dprint("POLLER::handleClient()::get_headers= " + str(p.get_headers()))
    """Check the Method, if not GET return 501"""
    if method != 'GET':
        return self.response501()
    """Determine Host"""
    if 'Host' in headers:
        Debug.dprint("POLLER::Host= " + headers['Host'])
        if headers['Host'].find("localhost") != -1 and 'localhost' in configHost:
            RootDir = configHost['localhost']
        else:
            RootDir = configHost['default']
    else:
        RootDir = configHost['default']
    """Find requested Resource"""
    if path == "/":
        path = "/index.html"
    try:
        # Map the file extension (if any) to a configured MIME type.
        dataType = ""
        if path.find('.') != -1:
            dataType = str(path.split('.')[-1])
        if dataType in configMedia:
            self.headers['Content-Type'] = configMedia[dataType]
        else:
            self.headers['Content-Type'] = "text/plain"
        fileName = RootDir + path
        """See if the requested file actually exists"""
        if not os.path.isfile(fileName):
            return self.response404()
        """Check if we have permissions to open the file"""
        if not os.access(fileName, os.R_OK):
            return self.response403()
        # Bug fix: the file handle used to be left open (leak); close it
        # deterministically with a context manager.
        with open(fileName, 'rb') as fileReader:
            body = fileReader.read()
        self.headers['Content-Length'] = os.stat(fileName).st_size
        self.headers['Last-Modified'] = strftime(
            "%a, %d %b %Y %H:%M:%S GMT", gmtime(os.stat(fileName).st_mtime))
        response = "HTTP/1.1 200 OK\r\n"
        for key in self.headers:
            response += str(key) + ": " + str(self.headers[key]) + "\r\n"
            Debug.dprint("POLLER::responseHeader: " + str(key) + ": " + str(self.headers[key]))
        response += "\r\n"
        response += str(body)
        return response
    except IOError:
        return self.response500()
def handleRequest(self, data):
    """Process a complete client request and return the raw response string.

    Should only be called once the request is complete (terminated by
    the blank line).  Only GET is implemented; unknown methods get a
    501, missing files a 404, unreadable files a 403, I/O errors a 500.
    """
    Debug.dprint("POLLER::handleRequest:data->" + str(data) + "<-data")
    # Constant response headers for every reply.
    self.respHeaders = {}
    self.respHeaders['Server'] = "SimpleHTTP/0.6 Python/2.7.9"
    self.respHeaders['Date'] = strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())
    # Parse the request (C-accelerated parser when available).
    try:
        from http_parser.parser import HttpParser
    except ImportError:
        from http_parser.pyparser import HttpParser
    p = HttpParser()
    p.execute(data, len(data))
    # Basic debug output from the parser.
    Debug.dprint("POLLER::handleRequest:HttpParser:get_method()->" + p.get_method())
    Debug.dprint("POLLER::handleRequest:HttpParser:get_path()->" + p.get_path())
    Debug.dprint("POLLER::handleRequest:HttpParser:get_headers()\n")
    dataHeaders = p.get_headers()
    for i in dataHeaders:
        Debug.dprint(i + ":" + dataHeaders[i])
    method = p.get_method()
    path = p.get_path()
    # check for GET, if not return 501
    if method != 'GET':
        return self.code501()
    # identify host
    if 'Host' in dataHeaders:
        Debug.dprint("POLLER::handleRequest:Host->" + dataHeaders['Host'])
        if dataHeaders['Host'] in configHost:
            # serve from the root directory configured for this host
            rootDir = configHost[dataHeaders['Host']]
        else:
            # unknown host: fall back to default
            # TODO: THIS MAY NEED TO BE AN ERROR
            rootDir = configHost['default']
    else:
        # no Host header at all: use the default root
        rootDir = configHost['default']
    # for the case of an empty path, point to index
    if path == "/":
        path = "/index.html"
    # attempt to retrieve the file
    try:
        # identify the type of file from its extension
        fileType = ""
        if path.find('.') != -1:
            fileType = str(path.split('.')[-1])
        Debug.dprint("POLLER::handleRequest:fileType->" + str(fileType))
        # assign a MIME type from the config dictionary
        if fileType in configMedia:
            self.respHeaders['Content-Type'] = configMedia[fileType]
        else:
            # Bug fix: this default was the typo "test/plain".
            self.respHeaders['Content-Type'] = "text/plain"
        filePath = rootDir + path
        Debug.dprint("POLLER::handleRequest:filePath->" + str(filePath))
        # missing file -> 404, unreadable file -> 403
        if not os.path.isfile(filePath):
            return self.code404()
        if not os.access(filePath, os.R_OK):
            return self.code403()
        # Bug fix: the file handle used to be left open (leak).
        with open(filePath, 'rb') as fileReader:
            respBody = fileReader.read()
    except IOError:
        return self.code500()
    # if everything worked, package the response and return it
    self.respHeaders['Content-Length'] = os.stat(filePath).st_size
    self.respHeaders['Last-Modified'] = strftime(
        "%a, %d %b %Y %H:%M:%S GMT", gmtime(os.stat(filePath).st_mtime))
    response = "HTTP/1.1 200 OK\r\n"
    for key in self.respHeaders:
        response += str(key) + ": " + str(self.respHeaders[key]) + "\r\n"
        Debug.dprint("POLLER::responseHeader: " + str(key) + ": " + str(self.respHeaders[key]))
    response += "\r\n"
    response += str(respBody)
    return response
def handle(self):
    """Handle one proxied HTTP request on this TCP connection.

    Reads and parses the request, pushes the JSON payload onto the
    target service's redis queue, then polls a per-request reply queue
    for up to ~10 seconds and relays the answer back to the client.
    Unknown services get a 40004 error; a reply timeout gets a 500.
    """
    thd = threading.current_thread()  # kept for debugging
    # logger.debug("ThreadedTCPRequestHandler--->Handle[%r]"%(thd))
    # Load redis credentials and proxy configuration from disk.
    fileobj = open('/opt/Keeprapid/KRWatch/server/conf/db.conf', 'r')
    _json_dbcfg = json.load(fileobj)
    fileobj.close()
    fileobj = open("/opt/Keeprapid/KRWatch/server/conf/config.conf", "r")
    _config = json.load(fileobj)
    fileobj.close()
    self._redis = redis.StrictRedis(_json_dbcfg['redisip'],
                                    int(_json_dbcfg['redisport']),
                                    password=_json_dbcfg['redispassword'])
    # Base queue name; the config file may override it.
    queuename = "W:Queue:httpproxy"
    if _config is not None and 'httpproxy' in _config and _config[
            'httpproxy'] is not None:
        if 'Consumer_Queue_Name' in _config['httpproxy'] and _config[
                'httpproxy']['Consumer_Queue_Name'] is not None:
            queuename = _config['httpproxy']['Consumer_Queue_Name']
    servicelist = os.listdir('./apps')
    try:
        request_path = ""
        body = []
        p = HttpParser()
        seqid = uuid.uuid1()
        # Per-request reply queue: the backend answers on this key.
        selfqueuename = "%s:%s" % (queuename, seqid.__str__())
        logger.debug("ThreadedTCPRequestHandler::run : %s" % (selfqueuename))
        # Read until the parser has a complete message.
        while True:
            self.request.settimeout(10)
            request = self.request.recv(recv_buf_len)
            recved = len(request)
            if (recved == 0):
                logger.warning("socket is closed by peer")
                self.request.close()
                return
            nparsed = p.execute(request, recved)
            if nparsed != recved:
                logger.warning("parse error")
                self.request.sendall('HTTP/1.1 500 OK\n\n')
                self.request.close()
                # Bug fix: this used to ``break`` and then keep working
                # with the closed socket; bail out instead.
                return
            if p.is_partial_body():
                body.append(p.recv_body())
            if p.is_message_complete():
                break
        content = "".join(body)
        routekey = ""
        # A path like /xxx is treated as a route key; /xxx/yyy/zzz is
        # treated as a destination service.
        request_path = p.get_path()[1:]
        # An empty body means there is nothing to forward.
        if content == '':
            self.request.close()
            return
        if request_path.find('/') == -1 and len(
                request_path) and request_path in servicelist:
            routekey = "W:Queue:%s" % request_path
            if request_path in _config:
                routekey = _config[request_path]['Consumer_Queue_Name']
            if len(content) == 0:
                content_json = dict()
            else:
                content_json = json.loads(content)
            content_json['sockid'] = seqid.__str__()
            content_json['from'] = selfqueuename
            self._redis.lpush(routekey, json.dumps(content_json))
            # Poll the reply queue for the backend's answer.
            t1 = time.time()
            while 1:
                if self._redis.llen(selfqueuename) > 0:
                    recvdata = self._redis.rpop(selfqueuename)
                    recvbuf = json.loads(recvdata)
                    recvbuf.pop('sockid')
                    recvbuf.pop('from')
                    self.request.sendall('HTTP/1.1 200 OK\n\n%s' %
                                         (json.dumps(recvbuf)))
                    self.request.close()
                    return
                time.sleep(0.1)
                t2 = time.time()
                if t2 - t1 > 10:
                    # Reply never arrived.
                    logger.error(
                        "ThreadedTCPRequestHandler: Waiting...... TIMEOUT")
                    # Bug fix: this used to serialize ``recvbuf``, which
                    # is unbound on this path (NameError swallowed by the
                    # outer except, so the client got no response at all).
                    ret = dict()
                    ret['error_code'] = 'timeout'
                    self.request.sendall('HTTP/1.1 500 OK\n\n%s' %
                                         (json.dumps(ret)))
                    self.request.close()
                    return
        else:
            # Unknown service: report error 40004.
            ret = dict()
            ret['error_code'] = '40004'
            self.request.sendall('HTTP/1.1 200 OK\n\n%s' % (json.dumps(ret)))
            self.request.close()
            return
    except Exception as e:
        logger.error("ThreadedTCPRequestHandler %s except raised : %s " %
                     (e.__class__, e.args))
        self.request.close()
        return
def handle_request(sock, parser, preread):
    """Read the rest of one HTTP request and dispatch it by path.

    ``parser`` may carry already-parsed headers (with ``preread`` body
    bytes), or be None, in which case a fresh request is read from the
    socket.  Returns True to keep the connection alive, False to close
    it, or the handler's own boolean result.
    """
    logging.debug("handle request")
    if parser:
        # Headers already parsed by the caller: consume the body.
        assert parser.is_headers_complete()
        headers = parser.get_headers()
        content_length = int(headers["Content-Length"]) if "Content-Length" in headers else 0
        assert content_length >= len(preread)
        if content_length:
            if preread:
                nparsed = parser.execute(preread, len(preread))
                assert nparsed == len(preread)
                content_length -= len(preread)
            while content_length:
                data = sock.recv(content_length)
                if not data:
                    logging.warn("client sock closed")
                    return False
                recved = len(data)
                content_length -= recved
                nparsed = parser.execute(data, recved)
                assert nparsed == recved
                if parser.is_message_complete():
                    break
    else:
        # Fresh connection: read and parse a whole request.
        parser = HttpParser()
        while True:
            logging.debug("recv........")
            data = sock.recv(64 * 1024)
            if not data:
                logging.warn("client sock closed")
                return False
            recved = len(data)
            nparsed = parser.execute(data, recved)
            assert nparsed == recved
            if parser.is_message_complete():
                break

    # Bug fix: compute keep-alive up front; it used to be referenced in
    # the 404 branch before ever being assigned (NameError).
    keepalived = parser.should_keep_alive()

    # Dispatch by request path.
    handlers = {
        "/upload": handle_upload,
        "/sync_upload": handle_sync_upload,
        "/download": handle_download,
        "/sync": handle_sync,
        "/ping": handle_ping,
        "/info": handle_info,
    }
    path = parser.get_path()
    handler = handlers.get(path)
    obj = None
    if handler is not None:
        obj = handler(sock, parser)
    else:
        logging.debug("unknown request path:%s", path)

    if obj is None:
        # No handler result: answer 404 and close the connection.
        sock.send("HTTP/1.1 404 Not Found\r\n")
        sock.send("Content-Length: 0\r\n")
        if keepalived:
            sock.send("Connection: keep-alive\r\n")
        else:
            sock.send("Connection: close\r\n")
        sock.send("\r\n")
        return False
    if not isinstance(obj, bool):
        # Serialize the handler's result as a JSON response body.
        resp = json.dumps(obj)
        sock.send("HTTP/1.1 200 OK\r\n")
        sock.send("Content-Type: application/json\r\n")
        sock.send("Content-Length: %d\r\n" % len(resp))
        if keepalived:
            sock.send("Connection: keep-alive\r\n")
        else:
            sock.send("Connection: close\r\n")
        sock.send("\r\n")
        sock.send(resp)
        return bool(keepalived)
    else:
        # The handler already wrote its own response.
        return obj
def recvrawsocket2(sockobj, address):
    """Receive one HTTP request on a raw socket and forward its JSON body
    to the matching service queue in redis.

    The socket is parked in the global ``responsesocketdict`` keyed by a
    fresh uuid so that another component can send the response later.
    Requests whose path (``/xxx``) does not name a directory under
    ``./apps`` get an immediate 40004 error response; matching requests
    get no reply here — the responder answers asynchronously.
    """
    try:
        logger.error(sockobj)
        request_path = ""
        body = []
        p = HttpParser()
        seqid = uuid.uuid1()
        # Park the socket in the global response table keyed by seqid.
        requestdict = dict()
        requestdict['sock'] = sockobj
        # requestdatetime = time.strftime('%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
        requestdatetime = time.time()
        requestdict['requestdatetime'] = requestdatetime
        responsesocketdict[seqid.__str__()] = requestdict
        logger.debug("responsesocketdict len = %d", len(responsesocketdict))
        # Read until the parser has a complete message (or the peer
        # closes / the bytes fail to parse).
        while True:
            request = sockobj.recv(recv_buf_len)
            # logger.warning("request : %s" % (request))
            recved = len(request)
            # logger.warning("recved : %d" % (recved))
            if (recved == 0):
                logger.warning("socket is closed by peer %r" % (sockobj))
                sockobj.close()
                break
            nparsed = p.execute(request, recved)
            logger.warning("nparsed : %d" % (nparsed))
            if nparsed != recved:
                logger.warning("parse error")
                sockobj.close()
                break
            if p.is_headers_complete():
                request_headers = p.get_headers()
                # for key in request_headers:
                # logger.debug("headers complete %s" % (request_headers.__str__()))
                # logger.warning("headers complete")
            if p.is_partial_body():
                body.append(p.recv_body())
                # logger.warning("body : %s" % (body))
            if p.is_message_complete():
                # logger.warning("message complete")
                break
        content = "".join(body)
        # seqid = uuid.uuid1()
        routekey = ""
        servicepath = ""
        # A path like /xxx is treated as a route key; /xxx/yyy/zzz is
        # treated as a destination service.
        request_path = p.get_path()[1:]
        # logger.warning('PublishThread request_path (%s), is routekey (%d)' % (request_path, request_path.find('/')))
        # logger.debug("content : %s" % (content))
        servicelist = os.listdir('./apps')
        if request_path.find('/') == -1 and len(
                request_path) and request_path in servicelist:
            # Route to the service queue; config may override the name.
            routekey = "A:Queue:%s" % request_path
            if request_path in _config:
                routekey = _config[request_path]['Consumer_Queue_Name']
            if len(content) == 0:
                content_json = dict()
            else:
                content_json = json.loads(content)
            content_json['sockid'] = seqid.__str__()
            # NOTE(review): ``selfqueuename`` is not defined in this
            # function — presumably a module-level global; verify.
            content_json['from'] = selfqueuename
            _redis.lpush(routekey, json.dumps(content_json))
        else:
            # Unknown service: report error 40004 and close.
            ret = dict()
            ret['error_code'] = '40004'
            sockobj.sendall('HTTP/1.1 200 OK\n\n%s' % (json.dumps(ret)))
            sockobj.shutdown(socket.SHUT_WR)
            sockobj.close()
        # requestdict = dict()
        # requestdict['sock'] = sockobj
        # requestdatetime = time.strftime(
        # '%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
        # requestdict['requestdatetime'] = requestdatetime
        # responsesocketdict[seqid.__str__()] = requestdict
        # sockobj.sendall('HTTP/1.1 200 OK\n\nWelcome %s' % (
        # seqid))
        # sockobj.close()
    except Exception as e:
        logger.error("recvrawsocket2 %s except raised : %s " %
                     (e.__class__, e.args))
if (not (p.get_method() == "GET" or p.get_method() == "HEAD")): # 501 Not Implemented response = "HTTP/1.1 501 Not Implemented\r\n" response += "Date: " + date + "\r\n" response += "Server: David's Python Server (Ubuntu)\r\n" message = "Method Not Implemented, 501" response += "Content-Length: " + str(len(message)) + "\r\n" response += "Content-Type: text/html\r\n\r\n" response += message self.clients[fd].send(response) return #find the file from the url file_path = "" if (p.get_path() == '/'): file_path = self.path + "/index.html" else: file_path = "./" + self.path + p.get_path() try: open(file_path) except IOError as (errno, strerror): response = "" if errno == 13: # 403 Forbidden response = "HTTP/1.1 403 Forbidden\r\n" response += "Date: " + date + "\r\n" response += "Server: David's Python Server (Ubuntu)\r\n" message = "Access Forbidden, 403" response += "Content-Length: " + str(
class PupyHTTPWrapperServer(BasePupyTransport): path = '/index.php?d=' allowed_methods = ('GET') server = None headers = { 'Content-Type': 'text/html; charset=utf-8', 'Server': 'Apache', 'Connection': 'close', } __slots__ = ( 'parser', 'is_http', 'body', 'downstream_buffer', 'well_known', 'omit', 'probe_len' ) def __init__(self, *args, **kwargs): super(PupyHTTPWrapperServer, self).__init__(*args, **kwargs) self.parser = HttpParser() self.is_http = None self.body = [] self.downstream_buffer = Buffer() self.well_known = ('GET', 'POST', 'OPTIONS', 'HEAD', 'PUT', 'DELETE') self.omit = tuple( '{} {}'.format(x, y) for x in self.well_known for y in ( self.path, '/ws/', 'ws/')) self.probe_len = max(len(x) for x in self.omit) def _http_response(self, code, status, headers=None, datasize=None, content=None): headers = {} headers.update(self.headers) if headers: headers.update(headers) if datasize: headers.update({ 'Content-Length': datasize, 'Content-Type': 'application/octet-steram', }) data = '\r\n'.join([ 'HTTP/1.1 {} {}'.format(code, status), '\r\n'.join([ '{}: {}'.format(key, value) for key,value in headers.iteritems() ]) ]) + '\r\n\r\n' self.downstream.write(data) def _handle_file(self, filepath): try: with open(filepath) as infile: size = stat(filepath).st_size self._http_response(200, 'OK', datasize=size) while True: data = infile.read(65535) if data: self.downstream.write(data) else: break except: self._http_response(404, 'Not found', 'Not found') def _handle_not_found(self): self._http_response(404, 'Not found', 'Not found') def _handle_http(self, data): self.parser.execute(data, len(data)) if self.parser.is_headers_complete(): try: if not self.parser.get_method() in self.allowed_methods: self._http_response(405, 'Method Not Allowed') else: urlpath = self.parser.get_path() urlpath = [ x.strip() for x in urlpath.split('/') if ( x and not str(x) in ('.', '..') ) ] root = self.server.config.get_folder('wwwroot') secret = self.server.config.getboolean('httpd', 
'secret') log = self.server.config.getboolean('httpd', 'log') if secret: wwwsecret = self.server.config.get('randoms', 'wwwsecret', random=5) if not (urlpath and urlpath[0] == wwwsecret): self._handle_not_found() if log: self.server.handler.display_error('{}: GET {} | SECRET = {}'.format( '{}:{}'.format(*self.downstream.transport.peer[:2]), urlpath, wwwsecret)) return urlpath = urlpath[1:] urlpath = path.sep.join([ self.server.config.get('randoms', x, new=False) or x for x in urlpath ]) if not urlpath: urlpath = 'index.html' filepath = path.join(root, urlpath) if path.exists(filepath): self._handle_file(filepath) if log: self.server.handler.display_success('{}: GET {}'.format( '{}:{}'.format(*self.downstream.transport.peer[:2]), urlpath)) else: self._handle_not_found() if log: self.server.handler.display_error('{}: GET {}'.format( '{}:{}'.format(*self.downstream.transport.peer[:2]), urlpath)) except Exception, e: print "Exception: {}".format(e) finally:
def parse(self):
    """Build a nested label/value tree describing every decoded protocol
    layer of this packet (Ethernet, then ARP or IPv4/IPv6, then
    TCP/HTTP, UDP/DNS, ICMP or IGMP), suitable for rendering as an
    expandable detail view in a UI.

    Returns:
        list[dict]: one dict per protocol section, each with 'label',
        'value', optional 'bold' and nested 'children' entries.
    """
    # --- Ethernet frame header (always present) ---
    data = [{
        'label': '以太网帧头部 / Ethernet Headers',
        'value': '',
        'bold': True,
        'children': [{
            'label': '目的端 MAC 地址',
            'value': self.ethHeader.destMac
        }, {
            'label': '发送端 MAC 地址',
            'value': self.ethHeader.sourceMac
        }, {
            'label': '帧类型',
            'value': '%s (0x%s)' % (self.ethHeader.type, self.ethHeader.type_code)
        }]
    }]
    if self.protocol == 'ARP':
        # --- ARP: no IP layer follows ---
        data.append({
            'label': 'ARP 消息 / Address Resolution Protocol',
            'value': '',
            'bold': True,
            'children': [{
                'label': '硬件类型',
                'value': '%s (%s)' % (self.arpBody.hardware_type, self.arpBody.hardware_type_code)
            }, {
                'label': '协议类型',
                'value': '%s (0x%s)' % (self.arpBody.protocol_type, self.arpBody.protocol_type_code)
            }, {
                'label': '硬件地址长度',
                'value': str(self.arpBody.hardware_size)
            }, {
                'label': '协议地址长度',
                'value': str(self.arpBody.protocol_size)
            }, {
                'label': '操作码',
                'value': '%s (%s)' % (self.arpBody.operation, self.arpBody.operation_code)
            }, {
                'label': '发送端 MAC 地址',
                'value': self.arpBody.sender_mac_address
            }, {
                'label': '发送端 IP 地址',
                'value': self.arpBody.sender_ip_address
            }, {
                'label': '目的端 MAC 地址',
                'value': self.arpBody.target_mac_address
            }, {
                'label': '目的端 IP 地址',
                'value': self.arpBody.target_ip_address
            }]
        })
    else:
        if self.ipHeader.version == 4:
            # --- IPv4 header; recompute and verify the header checksum ---
            self.ipHeader.verifyChecksum = verifyChecksum(
                self.ipHeader.header_raw, [], '').verifyChecksum
            data.append({
                'label': 'IPv4 头部 / IPv4 Header',
                'value': '',
                'bold': True,
                'children': [{
                    'label': '协议版本',
                    'value': self.ipHeader.version
                }, {
                    'label': '头部长度',
                    'value': str(self.ipHeader.header_length) + ' Bytes'
                }, {
                    'label': '服务类型',
                    'value': '0x%s' % (self.ipHeader.differentiated_services)
                }, {
                    'label': '来源 IP',
                    'value': self.ipHeader.source_ip
                }, {
                    'label': '目标 IP',
                    'value': self.ipHeader.dest_ip
                }, {
                    'label': '总长度',
                    'value': self.ipHeader.total_length
                }, {
                    'label': '标识',
                    'value': '0x%s (%s)' % (self.ipHeader.identification,
                                            self.ipHeader.identification_int)
                }, {
                    # Flag bits rendered in Wireshark-style "value | bit mask" form.
                    'label': '标志',
                    'value': '%s' % (self.ipHeader.flags.raw),
                    'children': [{
                        'label': '保留位',
                        'value': '%s | %s... .... .... ....' % (self.ipHeader.flags.reserved,
                                                               int(self.ipHeader.flags.reserved))
                    }, {
                        'label': 'Don\'t fragment',
                        'value': '%s | .%s.. .... .... ....' % (self.ipHeader.flags.fragment,
                                                               int(self.ipHeader.flags.fragment))
                    }, {
                        'label': 'More fragments',
                        'value': '%s | ..%s. .... .... ....' % (self.ipHeader.flags.more_fragment,
                                                               int(self.ipHeader.flags.more_fragment))
                    }, {
                        'label': '分段偏移',
                        'value': '%s | ...%s' % (self.ipHeader.flags.fragment_offset,
                                                self.ipHeader.flags.fragment_offset_bin)
                    }]
                }, {
                    'label': '生存期',
                    'value': self.ipHeader.time_to_live
                }, {
                    'label': '协议',
                    'value': '%s (%s)' % (self.ipHeader.protocol, self.ipHeader.protocol_code)
                }, {
                    'label': '校验和',
                    'value': '0x%s (%s)' % (self.ipHeader.origin_checksum,
                                            '校验' + {
                                                True: '通过',
                                                False: '失败'
                                            }[self.ipHeader.verifyChecksum])
                }]
            })
        else:
            # --- IPv6 header (no checksum field to verify) ---
            ipv6_header = {
                'label': 'IPv6 头部 / IPv6 Header',
                'value': '',
                'bold': True,
                'children': [{
                    'label': '协议版本',
                    'value': self.ipHeader.version
                }, {
                    'label': '通信分类',
                    'value': '0x%s' % (self.ipHeader._class)
                }, {
                    'label': '流标签',
                    'value': '0x%s' % (self.ipHeader.float_label)
                }, {
                    'label': '有效载荷长度',
                    'value': self.ipHeader.payload_length
                }, {
                    'label': '下一头部类型',
                    'value': '%s (%s)' % (self.ipHeader.next_header, self.ipHeader.next_header_code)
                }, {
                    'label': '跳数限制',
                    'value': self.ipHeader.hop_limit
                }, {
                    'label': '源 IP',
                    'value': self.ipHeader.source_ip
                }, {
                    'label': '目的 IP',
                    'value': self.ipHeader.dest_ip
                }]
            }
            # Append one child per IPv6 extension header / option.
            for option in self.ipHeader.options:
                ipv6_header['children'].append({
                    'label': consts.protocol_types[str(option['code'])],
                    'value': '0x' + option['value'],
                    'children': [{
                        'label': '下一头部类型',
                        'value': '%s (%s)' % (consts.protocol_types[str(option['next_header'])],
                                              option['next_header'])
                    }]
                })
            data.append(ipv6_header)
        if self.ipHeader.version == 4 and self.ipHeader.flags.more_fragment == True:
            # --- Not the last IPv4 fragment: list the fragments seen so far
            # instead of decoding the (incomplete) transport payload.
            # print('Waiting for more fragments.')
            ids = self.ip_ids[self.ipHeader.identification_int]
            slicing = {
                'label': 'IP 分片',
                'value': '共 %s 个数据包' % len(ids),
                'bold': True,
                'children': []
            }
            for id in ids:
                slicing['children'].append({
                    'label': '#%s' % id,
                    # NOTE(review): '/ 8' suggests .length is in bits — confirm.
                    'value': '%s Bytes' % (self.ip_packets[id].length / 8)
                })
            data.append(slicing)
        else:
            if self.ipHeader.protocol == 'TCP':
                # --- TCP header; verify checksum over pseudo-header + segment,
                # then decode option bytes from the raw body bits.
                self.ipBody.tcpHeader.verifyChecksum = verifyChecksum(
                    self.ipBody.parameters[0], self.ipBody.parameters[1],
                    self.ipHeader.protocol).verifyChecksum
                self.ipBody.tcpHeader.options = tcpOptions(
                    BitArray(self.ipBodyRaw)
                    [160:self.ipBody.tcpHeader.header_length * 8]).options
                tcp_header = {
                    'label': 'TCP 头部 / Transmission Control Protocol Header',
                    'value': '',
                    'bold': True,
                    'children': [{
                        'label': '源端口',
                        'value': self.ipBody.tcpHeader.source_port
                    }, {
                        'label': '目的端口',
                        'value': self.ipBody.tcpHeader.destination_port
                    }, {
                        'label': '数据序号 (seq)',
                        'value': self.ipBody.tcpHeader.sequence_number
                    }, {
                        'label': '确认序号 (ack)',
                        'value': self.ipBody.tcpHeader.acknowledge_number
                    }, {
                        'label': '首部长度',
                        'value': self.ipBody.tcpHeader.header_length
                    }, {
                        # Each TCP flag rendered as "value | bit position".
                        'label': '标志位',
                        'value': '0x' + self.ipBody.tcpHeader.flags_raw,
                        'children': [{
                            'label': 'Reserved',
                            'value': '%s | %s. .... ....' % (self.ipBody.tcpHeader.flags.reserved.uint,
                                                             self.ipBody.tcpHeader.flags.reserved.bin)
                        }, {
                            'label': 'Nonce',
                            'value': '%s | ...%d .... ....' % (self.ipBody.tcpHeader.flags.nonce,
                                                               self.ipBody.tcpHeader.flags.nonce)
                        }, {
                            'label': 'Congestion Window Reduced',
                            'value': '%s | .... %d... ....' % (self.ipBody.tcpHeader.flags.cwr,
                                                               self.ipBody.tcpHeader.flags.cwr)
                        }, {
                            'label': 'ECN-Echo',
                            'value': '%s | .... .%d.. ....' % (self.ipBody.tcpHeader.flags.ecn_echo,
                                                               self.ipBody.tcpHeader.flags.ecn_echo)
                        }, {
                            'label': 'Urgent',
                            'value': '%s | .... ..%d. ....' % (self.ipBody.tcpHeader.flags.urgent,
                                                               self.ipBody.tcpHeader.flags.urgent)
                        }, {
                            'label': 'Acknowledgment',
                            'value': '%s | .... ...%d ....' % (self.ipBody.tcpHeader.flags.acknowledgement,
                                                               self.ipBody.tcpHeader.flags.acknowledgement)
                        }, {
                            'label': 'Push',
                            'value': '%s | .... .... %d...' % (self.ipBody.tcpHeader.flags.push,
                                                               self.ipBody.tcpHeader.flags.push)
                        }, {
                            'label': 'Reset',
                            'value': '%s | .... .... .%d..' % (self.ipBody.tcpHeader.flags.reset,
                                                               self.ipBody.tcpHeader.flags.reset)
                        }, {
                            'label': 'Syn',
                            'value': '%s | .... .... ..%d.' % (self.ipBody.tcpHeader.flags.syn,
                                                               self.ipBody.tcpHeader.flags.syn)
                        }, {
                            'label': 'Fin',
                            'value': '%s | .... .... ...%d' % (self.ipBody.tcpHeader.flags.fin,
                                                               self.ipBody.tcpHeader.flags.fin)
                        }]
                    }, {
                        'label': '窗口大小',
                        'value': self.ipBody.tcpHeader.window_size
                    }, {
                        'label': '校验和',
                        'value': '0x%s (%s)' % (self.ipBody.tcpHeader.checksum,
                                                '校验' + {
                                                    True: '通过',
                                                    False: '失败'
                                                }[self.ipBody.tcpHeader.verifyChecksum])
                    }]
                }
                # Decoded TCP options: first element of each option group is
                # its label/value, the rest are detail children.
                options = []
                if self.ipBody.tcpHeader.options:
                    for idx in range(len(self.ipBody.tcpHeader.options)):
                        option = {
                            'label': self.ipBody.tcpHeader.options[idx][0]['label'],
                            'value': '(%s)' % self.ipBody.tcpHeader.options[idx][0]['value'],
                            'children': self.ipBody.tcpHeader.options[idx][1:]
                        }
                        options.append(option)
                if options:
                    tcp_header['children'].append({
                        'label': '选项',
                        'value': '',
                        'children': options
                    })
                data.append(tcp_header)
                # NOTE(review): leftover debug prints below.
                print(self.id)
                print(tcp_bodies)
                if self.id in packet_id_struct:
                    # This packet belongs to a reassembled TCP stream: list
                    # the segment ids, and if this id carries the reassembled
                    # payload, try to parse it as HTTP.
                    tmp = []
                    http_payload = None
                    for p_id in packet_id_struct[self.id]:
                        tmp.append({'value': '', 'label': '#%s' % p_id})
                    if self.id in tcp_bodies:
                        # print(tcp_bodies[self.id]['data'].decode('utf-8', 'ignore'))
                        children = [{
                            'label': '该包是 TCP 分段的最后一段, 可以通过右下角按钮「导出 TCP 分段数据」.',
                            'value': '',
                            'bold': True
                        }, {
                            'label': '共 %s 个分段' % len(tmp),
                            'value': '',
                            'bold': True,
                            'children': tmp
                        }]
                        try:
                            # Probe the reassembled payload with HttpParser;
                            # a partial parse (nparsed != recved) is treated
                            # as "not HTTP" via the AssertionError below.
                            p = HttpParser()
                            recved = len(tcp_bodies[self.id]['data'])
                            nparsed = p.execute(tcp_bodies[self.id]['data'], recved)
                            assert nparsed == recved
                            headers = []
                            for header in p.get_headers():
                                headers.append({
                                    'label': header,
                                    'value': p.get_headers()[header]
                                })
                            # NOTE(review): leftover debug print.
                            print(p.get_path(), p.get_url(), p.get_fragment(),
                                  p.get_method(), p.get_query_string(),
                                  p.get_status_code(), p.get_wsgi_environ())
                            http_payload = [{
                                'label': 'HTTP 版本',
                                'value': '%s.%s' % (p.get_version()[0], p.get_version()[1])
                            }, {
                                'label': 'HTTP 头部',
                                'value': '',
                                'children': headers
                            }]
                            # A non-empty URL means a request; otherwise show
                            # the response status code.
                            if len(p.get_url()) != 0:
                                http_payload.append({
                                    'label': '请求方式',
                                    'value': p.get_method()
                                })
                                http_payload.append({
                                    'label': '路径',
                                    'value': p.get_url()
                                })
                                http_payload.append({
                                    'label': '请求参数',
                                    'value': p.get_query_string()
                                })
                                http_payload.append({
                                    'label': '主机名',
                                    'value': p.get_wsgi_environ()['HTTP_HOST']
                                })
                            else:
                                http_payload.append({
                                    'label': '状态码',
                                    'value': p.get_status_code()
                                })
                        except AssertionError:
                            pass
                    else:
                        children = [{
                            'label': '共 %s 个分段' % len(tmp),
                            'value': '',
                            'bold': True,
                            'children': tmp
                        }]
                    data.append({
                        'label': 'TCP 数据 / TCP Payload',
                        'value': '',
                        'bold': True,
                        'children': children
                    })
                    if http_payload != None:
                        data.append({
                            'label': 'HTTP 数据 / HTTP Data',
                            'value': '',
                            'bold': True,
                            'children': http_payload
                        })
                '''
                if self.ipBody.tcpBody.has_body:
                    try:
                        p = HttpParser()
                        recved = len(self.ipBody.tcpBody.buf)
                        nparsed = p.execute(self.ipBody.tcpBody.buf, recved)
                        assert nparsed == recved
                        print(p.get_headers())
                    except AssertionError:
                        print('NOT HTTP')
                data.append({
                    'label': 'TCP 数据 / Data',
                    'value': '',
                    'bold': True,
                    'children': [
                        {
                            'label': '数据',
                            'value': self.ipBody.tcpBody.raw
                        }
                    ]
                })
                '''
            elif self.ipHeader.protocol == 'UDP':
                # --- UDP header; verify checksum over pseudo-header + datagram.
                self.ipBody.udpHeader.verifyChecksum = verifyChecksum(
                    self.ipBody.parameters[0], self.ipBody.parameters[1],
                    self.ipHeader.protocol).verifyChecksum
                data.append({
                    'label': 'UDP 头部 / User Datagram Protocol Header',
                    'value': '',
                    'bold': True,
                    'children': [{
                        'label': '源端口',
                        'value': self.ipBody.udpHeader.source_port
                    }, {
                        'label': '目的端口',
                        'value': self.ipBody.udpHeader.destination_port
                    }, {
                        'label': '长度',
                        'value': self.ipBody.udpHeader.length
                    }, {
                        'label': '校验和',
                        'value': '0x%s (%s)' % (self.ipBody.udpHeader.checksum,
                                                '校验' + {
                                                    True: '通过',
                                                    False: '失败'
                                                }[self.ipBody.udpHeader.verifyChecksum])
                    }]
                })
                if self.ipBody.udpHeader.source_port == 53 or self.ipBody.udpHeader.destination_port == 53:
                    # DNS
                    children = [{
                        'label': '会话标识',
                        'value': self.ipBody.dnsBody.transaction_id
                    }, {
                        # NOTE(review): this '标志' (flags) entry reuses
                        # transaction_id — looks like a copy-paste bug; a
                        # flags field was probably intended.
                        'label': '标志',
                        'value': '0x' + self.ipBody.dnsBody.transaction_id
                    }, {
                        'label': '问题数',
                        'value': self.ipBody.dnsBody.questions
                    }, {
                        'label': '回答资源记录数',
                        'value': self.ipBody.dnsBody.answer_rrs
                    }, {
                        'label': '授权资源记录数',
                        'value': self.ipBody.dnsBody.authority_rrs
                    }, {
                        'label': '附加资源记录数',
                        'value': self.ipBody.dnsBody.additional_rrs
                    }]
                    # Question section.
                    if len(self.ipBody.dnsBody.queries) > 0:
                        queries = []
                        for query in self.ipBody.dnsBody.queries:
                            queries.append({
                                'label': str(query.qname),
                                'value': '',
                                'bold': True,
                                'children': [{
                                    'label': '域名',
                                    'value': str(query.qname)
                                }, {
                                    'label': 'Type',
                                    'value': '%s (%s)' % (consts.dns_types[query.qtype], query.qtype)
                                }, {
                                    'label': 'Class',
                                    'value': '%s (%s)' % (consts.dns_classes[query.qclass], query.qclass)
                                }]
                            })
                        children.append({
                            'label': '查询问题',
                            'value': '',
                            'bold': True,
                            'children': queries
                        })
                    # Answer section.
                    if len(self.ipBody.dnsBody.answers) > 0:
                        answers = []
                        for answer in self.ipBody.dnsBody.answers:
                            answers.append({
                                'label': str(answer.rname),
                                'value': '',
                                'bold': True,
                                'children': [{
                                    'label': '域名',
                                    'value': str(answer.rname)
                                }, {
                                    'label': 'Type',
                                    'value': '%s (%s)' % (consts.dns_types[answer.rtype], answer.rtype)
                                }, {
                                    'label': 'Class',
                                    'value': '%s (%s)' % (consts.dns_classes[answer.rclass], answer.rclass)
                                }, {
                                    'label': '生存时间 (ttl)',
                                    'value': str(answer.ttl)
                                }, {
                                    'label': '数据',
                                    'value': str(answer.rdata)
                                }]
                            })
                        children.append({
                            'label': '回答',
                            'value': '',
                            'bold': True,
                            'children': answers
                        })
                    data.append({
                        'label': 'DNS / Domain Name System',
                        'value': '',
                        'bold': True,
                        'children': children
                    })
            elif 'ICMP' in self.ipHeader.protocol:
                # --- ICMP / ICMPv6: checksum inputs differ (ICMPv6 includes
                # the IPv6 pseudo-header).
                if 'IPv6' in self.ipHeader.protocol:
                    self.ipBody.icmpHeader.verifyChecksum = verifyChecksum(
                        self.ipBody.parameters[0], self.ipBody.parameters[1],
                        self.ipHeader.protocol).verifyChecksum
                else:
                    self.ipBody.icmpHeader.verifyChecksum = verifyChecksum(
                        self.ipBody.parameters, [], '').verifyChecksum
                data.append({
                    'label': 'ICMP 头部 / Internet Control Message Protocol Headers',
                    'value': '',
                    'bold': True,
                    'children': [{
                        'label': '类型',
                        'value': '%s (%s)' % (self.ipBody.icmpHeader.type,
                                              self.ipBody.icmpHeader.type_name)
                    }, {
                        'label': '代码',
                        'value': self.ipBody.icmpHeader.code
                    }, {
                        'label': '校验和',
                        'value': '0x%s (%s)' % (self.ipBody.icmpHeader.checksum,
                                                '校验' + {
                                                    True: '通过',
                                                    False: '失败'
                                                }[self.ipBody.icmpHeader.verifyChecksum])
                    }]
                })
            elif 'IGMP' in self.ipHeader.protocol:
                # --- IGMP: an 8-byte payload is classic IGMP (v1/v2);
                # anything else is decoded as IGMPv3.
                if self.ipHeader.payload_length == 8:
                    self.ipBody.igmpHeader.verifyChecksum = verifyChecksum(
                        self.ipBody.parameters, [], '').verifyChecksum
                    data.append({
                        'label': 'IGMP 头部 / Internet Group Management Protocol Headers',
                        'value': '',
                        'bold': True,
                        'children': [{
                            'label': '类型',
                            'value': '0x%s(%s)' % (self.ipBody.igmpHeader.type,
                                                   self.ipBody.igmpHeader.type_name)
                        }, {
                            'label': '最大响应时延',
                            'value': '%s 秒(0x%s)' % (self.ipBody.igmpHeader.maxRespTime,
                                                     self.ipBody.igmpHeader.maxRespTimeHex)
                        }, {
                            'label': '校验和',
                            'value': '0x%s(%s)' % (self.ipBody.igmpHeader.checksum,
                                                   '校验' + {
                                                       True: '通过',
                                                       False: '失败'
                                                   }[self.ipBody.igmpHeader.verifyChecksum])
                        }, {
                            'label': '组地址',
                            'value': self.ipBody.igmpHeader.groupAddress
                        }]
                    })
                else:
                    self.ipBody.igmpv3Header.verifyChecksum = verifyChecksum(
                        self.ipBody.parameters, [], '').verifyChecksum
                    data.append({
                        'label': 'IGMPv3 头部 / Internet Group Management Protocol Version 3 Headers',
                        'value': '',
                        'bold': True,
                        'children': [{
                            'label': '类型',
                            'value': '0x%s' % self.ipBody.igmpv3Header.type
                        }, {
                            'label': '校验和',
                            'value': '0x%s(%s)' % (self.ipBody.igmpv3Header.checksum,
                                                   '校验' + {
                                                       True: '通过',
                                                       False: '失败'
                                                   }[self.ipBody.igmpv3Header.verifyChecksum])
                        }]
                    })
    return data