def mitm_request(self, data):
    """Build the ElasticSearch document for an intercepted HTTP request.

    Parses the raw request, reconstructs the full URL and initializes
    self.doc (a DocHTTPRequestResponse) so that the matching response
    handler can complete and save it. Returns data unmodified, as the
    proxy framework expects.
    """
    r = HTTPRequest(data)

    # Determine the URL: a CONNECT tunnel implies HTTPS.
    if self.is_connect:
        scheme = "https"
    else:
        scheme = "http"
    url = scheme + "://" + self.hostname
    # Append the port only when it differs from the scheme default.
    # Parentheses make the intended and/or grouping explicit (previously it
    # relied on "and" binding tighter than "or").
    if (scheme == "http" and int(self.port) != 80) or (scheme == "https" and int(self.port) != 443):
        # Bugfix: self.port may be an int; concatenating a non-str raises
        # TypeError. The sibling handler variant applies the same str() guard.
        url += ":" + str(self.port)
    url += self.path
    if args.verbose:
        print(url)

    self.doc = DocHTTPRequestResponse(host=self.hostname, port=int(self.port), protocol=scheme)
    self.doc.meta.index = args.index
    self.doc.request.url = url
    self.doc.request.requestline = r.requestline
    self.doc.request.method = r.command
    self.doc.host = self.hostname
    self.doc.port = int(self.port)
    self.doc.protocol = scheme
    return data
def applyConfig(self):
    """Apply the current ElasticSearch settings: connect and prepare the index.

    On any setup failure, reports the error to the user via a dialog
    instead of raising.
    """
    try:
        print("Connecting to '%s', index '%s'" % (self.confESHost, self.confESIndex))
        connection = connections.create_connection(hosts=[self.confESHost])
        es_index = Index(self.confESIndex)
        es_index.doc_type(DocHTTPRequestResponse)
        DocHTTPRequestResponse.init()
        try:
            es_index.create()
        except:
            # Best effort: creating an index that already exists fails.
            # Bare except kept deliberately (Jython: also catches Java exceptions).
            pass
    except Exception as e:
        JOptionPane.showMessageDialog(self.panel, "<html><p style='width: 300px'>Error while initializing ElasticSearch: %s</p></html>" % (str(e)), "Error", JOptionPane.ERROR_MESSAGE)
def applyConfig(self):
    """Connect to the configured ElasticSearch host and ensure the index exists.

    Failures during setup are shown in an error dialog rather than propagated.
    """
    try:
        print("Connecting to '%s', index '%s'" % (self.confESHost, self.confESIndex))
        es_conn = connections.create_connection(hosts=[self.confESHost])
        target_index = Index(self.confESIndex)
        target_index.doc_type(DocHTTPRequestResponse)
        DocHTTPRequestResponse.init()
        try:
            target_index.create()
        except:
            # The index may already exist - ignore. Bare except kept on
            # purpose (Jython code: Java exceptions must be caught too).
            pass
    except Exception as e:
        JOptionPane.showMessageDialog(
            self.panel,
            "<html><p style='width: 300px'>Error while initializing ElasticSearch: %s</p></html>" % (str(e)),
            "Error",
            JOptionPane.ERROR_MESSAGE)
def mitm_request(self, data):
    """Index an intercepted HTTP request's metadata into ElasticSearch.

    Sets up the ES connection/index, then builds self.doc from the request
    so mitm_response() can complete and save it. Returns data unmodified.
    """
    # Initialize ES connection and index.
    # NOTE(review): this runs on every request; hoisting it to startup would
    # avoid repeated setup, but is kept here to preserve current behavior.
    res = connections.create_connection(hosts=[args.elasticsearch])
    idx = Index(args.index)
    idx.doc_type(DocHTTPRequestResponse)
    try:
        DocHTTPRequestResponse.init()
        idx.create()
    except Exception:
        # Best effort: index creation fails when the index already exists.
        pass

    r = HTTPRequest(data)

    # Determine the URL: a CONNECT tunnel implies HTTPS.
    if self.is_connect:
        scheme = "https"
    else:
        scheme = "http"
    url = scheme + "://" + self.hostname
    # Append non-default ports; parentheses make the and/or grouping explicit.
    if (scheme == "http" and int(self.port) != 80) or (scheme == "https" and int(self.port) != 443):
        # Bugfix: self.port may be an int; concatenating a non-str raises
        # TypeError (the sibling variant of this method already uses str()).
        url += ":" + str(self.port)
    url += self.path
    if args.verbose:
        print(url)

    self.doc = DocHTTPRequestResponse(host=self.hostname, port=int(self.port), protocol=scheme)
    self.doc.meta.index = args.index
    self.doc.request.url = url
    self.doc.request.requestline = r.requestline
    self.doc.request.method = r.command
    self.doc.host = self.hostname
    self.doc.port = int(self.port)
    self.doc.protocol = scheme
    return data
def mitm_request(self, data):
    """Index an intercepted HTTP request's metadata into ElasticSearch.

    Sets up the ES connection/index, then builds self.doc from the request
    so mitm_response() can complete and save it. Returns data unmodified.
    """
    # Initialize ES connection and index.
    # NOTE(review): this runs per request; consider hoisting to startup.
    res = connections.create_connection(hosts=[args.elasticsearch])
    idx = Index(args.index)
    idx.doc_type(DocHTTPRequestResponse)
    try:
        DocHTTPRequestResponse.init()
        idx.create()
    except Exception:
        # Narrowed from a bare except: creation of an already-existing index
        # is expected to fail; KeyboardInterrupt/SystemExit must not be eaten.
        pass

    r = HTTPRequest(data)

    # Determine the URL: a CONNECT tunnel implies HTTPS.
    if self.is_connect:
        scheme = "https"
    else:
        scheme = "http"
    url = scheme + "://" + self.hostname
    # Append non-default ports; parentheses make the and/or grouping explicit.
    if (scheme == "http" and int(self.port) != 80) or (scheme == "https" and int(self.port) != 443):
        url += ":" + str(self.port)
    url += self.path
    if args.verbose:
        print(url)

    self.doc = DocHTTPRequestResponse(host=self.hostname, port=int(self.port), protocol=scheme)
    self.doc.meta.index = args.index
    self.doc.request.url = url
    self.doc.request.requestline = r.requestline
    self.doc.request.method = r.command
    self.doc.host = self.hostname
    self.doc.port = int(self.port)
    self.doc.protocol = scheme
    return data
def saveToES(self, msg):
    """Convert a Burp request/response pair into a DocHTTPRequestResponse and save it.

    msg: a Burp IHttpRequestResponse. Request and response parts are each
    optional; whatever is present is analyzed via Burp's helpers and mapped
    onto the ES document, which is saved at the end.

    Bare except clauses are kept deliberately: this is Jython code and Java
    exceptions from the Burp API must be caught as well.
    """
    httpService = msg.getHttpService()
    doc = DocHTTPRequestResponse(protocol=httpService.getProtocol(), host=httpService.getHost(), port=httpService.getPort())
    request = msg.getRequest()
    response = msg.getResponse()
    if request:
        iRequest = self.helpers.analyzeRequest(msg)
        doc.request.method = iRequest.getMethod()
        doc.request.url = iRequest.getUrl().toString()
        headers = iRequest.getHeaders()
        for header in headers:
            try:
                doc.add_request_header(header)
            except:
                # The first line is the request line, not a name:value header.
                doc.request.requestline = header
        parameters = iRequest.getParameters()
        for parameter in parameters:
            # Map Burp's parameter-type constant to the WASE type name.
            ptype = parameter.getType()
            if ptype == IParameter.PARAM_URL:
                typename = "url"
            elif ptype == IParameter.PARAM_BODY:
                typename = "body"
            elif ptype == IParameter.PARAM_COOKIE:
                typename = "cookie"
            elif ptype == IParameter.PARAM_XML:
                typename = "xml"
            elif ptype == IParameter.PARAM_XML_ATTR:
                typename = "xmlattr"
            elif ptype == IParameter.PARAM_MULTIPART_ATTR:
                typename = "multipartattr"
            elif ptype == IParameter.PARAM_JSON:
                typename = "json"
            else:
                typename = "unknown"
            name = parameter.getName()
            value = parameter.getValue()
            doc.add_request_parameter(typename, name, value)
        # Map Burp's content-type constant to the WASE content-type name.
        ctype = iRequest.getContentType()
        if ctype == IRequestInfo.CONTENT_TYPE_NONE:
            doc.request.content_type = "none"
        elif ctype == IRequestInfo.CONTENT_TYPE_URL_ENCODED:
            doc.request.content_type = "urlencoded"
        elif ctype == IRequestInfo.CONTENT_TYPE_MULTIPART:
            doc.request.content_type = "multipart"
        elif ctype == IRequestInfo.CONTENT_TYPE_XML:
            doc.request.content_type = "xml"
        elif ctype == IRequestInfo.CONTENT_TYPE_JSON:
            doc.request.content_type = "json"
        elif ctype == IRequestInfo.CONTENT_TYPE_AMF:
            doc.request.content_type = "amf"
        else:
            doc.request.content_type = "unknown"
        bodyOffset = iRequest.getBodyOffset()
        doc.request.body = request[bodyOffset:].tostring().decode("ascii", "replace")
    if response:
        iResponse = self.helpers.analyzeResponse(response)
        doc.response.status = iResponse.getStatusCode()
        doc.response.content_type = iResponse.getStatedMimeType()
        doc.response.inferred_content_type = iResponse.getInferredMimeType()
        headers = iResponse.getHeaders()
        for header in headers:
            try:
                doc.add_response_header(header)
            except:
                # The first line is the status line, not a name:value header.
                doc.response.responseline = header
        cookies = iResponse.getCookies()
        for cookie in cookies:
            expCookie = cookie.getExpiration()
            expiration = None
            if expCookie:
                try:
                    # java.util.Date.time is milliseconds since the epoch.
                    expiration = datetime.fromtimestamp(expCookie.time / 1000)
                except:
                    pass
            # Bugfix: the third argument is the cookie *domain*; the original
            # passed cookie.getExpiration() here. The sibling genESDoc()
            # implementation passes cookie.getDomain() in this position.
            doc.add_response_cookie(cookie.getName(), cookie.getValue(), cookie.getDomain(), cookie.getPath(), expiration)
        bodyOffset = iResponse.getBodyOffset()
        doc.response.body = response[bodyOffset:].tostring().decode("ascii", "replace")
    doc.save()
class WASEProxyHandler(ProxyHandler):
    """Intercepts HTTP(S) requests/responses, extracts data and feeds ElasticSearch"""

    def mitm_request(self, data):
        """Build self.doc from the intercepted request; return data unmodified."""
        r = HTTPRequest(data)
        # Determine the URL: a CONNECT tunnel implies HTTPS.
        if self.is_connect:
            scheme = "https"
        else:
            scheme = "http"
        url = scheme + "://" + self.hostname
        # Append the port only when it differs from the scheme default.
        # Parentheses make the intended and/or grouping explicit.
        if (scheme == "http" and int(self.port) != 80) or (scheme == "https" and int(self.port) != 443):
            # Bugfix: self.port may be an int; concatenating a non-str raises
            # TypeError - always convert.
            url += ":" + str(self.port)
        url += self.path
        if args.verbose:
            print(url)
        self.doc = DocHTTPRequestResponse(host=self.hostname, port=int(self.port), protocol=scheme)
        self.doc.meta.index = args.index
        self.doc.request.url = url
        self.doc.request.requestline = r.requestline
        self.doc.request.method = r.command
        self.doc.host = self.hostname
        self.doc.port = int(self.port)
        self.doc.protocol = scheme
        return data

    def mitm_response(self, data):
        """Fill the response part of self.doc, save it, return data unmodified."""
        lines = data.split("\r\n")
        r = HTTPResponse(FakeSocket(data))
        r.begin()
        # Response line.
        self.doc.response.status = r.status
        self.doc.response.responseline = lines[0]
        # Headers: collect content type and Set-Cookie values while indexing.
        ct = ""
        cookies = list()
        for header in r.getheaders():
            name = header[0]
            value = header[1]
            self.doc.add_parsed_response_header(name, value)
            if name == "content-type":
                ct = value
            elif name == "set-cookie":
                cookies.append(value)
        # Content type: reContentType extracts the bare type from the header.
        try:
            m = reContentType.search(ct)
            self.doc.response.content_type = m.group(1)
        except Exception:
            # Missing or unparseable Content-Type header - leave field unset.
            pass
        # Cookies.
        for cookie in cookies:
            # TODO: the following code extracts only partial cookie data - check/rewrite
            try:
                pc = SimpleCookie(cookie)
                for name in pc.keys():
                    c = pc[name]
                    # Morsel attributes may be missing; default each to None
                    # (replaces four try/except AttributeError blocks).
                    value = getattr(c, "value", None)
                    domain = getattr(c, "domain", None)
                    path = getattr(c, "path", None)
                    exp = getattr(c, "expires", None)
                    self.doc.add_response_cookie(name, value, domain, path, exp)
            except Exception:
                pass
        # Body.
        bodybytes = r.read()
        self.doc.response.body = bodybytes.decode(args.charset, args.encodingerrors)
        try:
            self.doc.save(storeResponseBody)
        except Exception:
            # Best effort: an ES outage must not break the proxied connection.
            pass
        return data
# Command-line interface and startup of the intercepting proxy.
argparser = argparse.ArgumentParser(description="Intercepting HTTP(S) proxy that forwards data into ElasticSearch WASE datastructure")
argparser.add_argument("--listenaddr", "-l", default="localhost", help="IP/hostname the server binds to (default: %(default)s)")
argparser.add_argument("--port", "-p", type=int, default=8080, help="Port the proxy server listens to (default: %(default)s)")
argparser.add_argument("--elasticsearch", "-e", default="localhost", help="ElasticSearch instance (default: %(default)s)")
argparser.add_argument("--index", "-i", default="wase-proxy", help="ElasticSearch index (default: %(default)s)")
argparser.add_argument("--no-response-body", "-n", action="store_true", help="Don't store response body in ElasticSearch")
argparser.add_argument("--charset", "-c", default="utf-8", help="Character set used for decoding of bytes responses into string passed to ES (default: %(default)s)")
argparser.add_argument("--encodingerrors", "-E", default="ignore", choices=["ignore", "replace", "strict"], help="Behavior when encoding errors occur, must be ignore, replace or strict (default: %(default)s)")
argparser.add_argument("--verbose", "-v", action="store_true", help="Be verbose")
args = argparser.parse_args()

# Bugfix: always define storeResponseBody. Previously it was only assigned
# when --no-response-body was given, leaving it to a default defined elsewhere
# (or a NameError) otherwise; this assignment is equivalent in both cases.
storeResponseBody = not args.no_response_body

# Initialize ES connection and index.
res = connections.create_connection(hosts=[args.elasticsearch])
idx = Index(args.index)
idx.doc_type(DocHTTPRequestResponse)
try:
    DocHTTPRequestResponse.init()
    idx.create()
except Exception:
    # Best effort: index creation fails when the index already exists.
    pass

# Run proxy until interrupted.
proxy = AsyncMitmProxy(RequestHandlerClass=WASEProxyHandler, server_address=(args.listenaddr, args.port))
try:
    proxy.serve_forever()
except KeyboardInterrupt:
    proxy.server_close()
def genESDoc(self, msg, timeStampFromResponse=False):
    # Build a DocHTTPRequestResponse from a Burp IHttpRequestResponse.
    # msg: Burp request/response pair; request and response parts are optional.
    # timeStampFromResponse: when True, try to take the document timestamp from
    # the response "Date" header instead of "now".
    # Returns the (unsaved) document.
    # NOTE: bare except clauses are deliberate here - this is Jython code and
    # Java exceptions from the Burp API must be caught too.
    httpService = msg.getHttpService()
    doc = DocHTTPRequestResponse(
        protocol=httpService.getProtocol(),
        host=httpService.getHost(),
        port=httpService.getPort())
    doc.meta.index = self.confESIndex
    request = msg.getRequest()
    response = msg.getResponse()
    if request:
        iRequest = self.helpers.analyzeRequest(msg)
        doc.request.method = iRequest.getMethod()
        doc.request.url = iRequest.getUrl().toString()
        headers = iRequest.getHeaders()
        for header in headers:
            try:
                doc.add_request_header(header)
            except:
                # First line is the request line, not a name:value header.
                doc.request.requestline = header
        parameters = iRequest.getParameters()
        for parameter in parameters:
            # Map Burp's parameter-type constant to the WASE type name.
            ptype = parameter.getType()
            if ptype == IParameter.PARAM_URL:
                typename = "url"
            elif ptype == IParameter.PARAM_BODY:
                typename = "body"
            elif ptype == IParameter.PARAM_COOKIE:
                typename = "cookie"
            elif ptype == IParameter.PARAM_XML:
                typename = "xml"
            elif ptype == IParameter.PARAM_XML_ATTR:
                typename = "xmlattr"
            elif ptype == IParameter.PARAM_MULTIPART_ATTR:
                typename = "multipartattr"
            elif ptype == IParameter.PARAM_JSON:
                typename = "json"
            else:
                typename = "unknown"
            name = parameter.getName()
            value = parameter.getValue()
            doc.add_request_parameter(typename, name, value)
        # Map Burp's content-type constant to the WASE content-type name.
        ctype = iRequest.getContentType()
        if ctype == IRequestInfo.CONTENT_TYPE_NONE:
            doc.request.content_type = "none"
        elif ctype == IRequestInfo.CONTENT_TYPE_URL_ENCODED:
            doc.request.content_type = "urlencoded"
        elif ctype == IRequestInfo.CONTENT_TYPE_MULTIPART:
            doc.request.content_type = "multipart"
        elif ctype == IRequestInfo.CONTENT_TYPE_XML:
            doc.request.content_type = "xml"
        elif ctype == IRequestInfo.CONTENT_TYPE_JSON:
            doc.request.content_type = "json"
        elif ctype == IRequestInfo.CONTENT_TYPE_AMF:
            doc.request.content_type = "amf"
        else:
            doc.request.content_type = "unknown"
        bodyOffset = iRequest.getBodyOffset()
        # Request body: lossy ASCII decode of everything after the headers.
        doc.request.body = request[bodyOffset:].tostring().decode(
            "ascii", "replace")
    if response:
        iResponse = self.helpers.analyzeResponse(response)
        doc.response.status = iResponse.getStatusCode()
        doc.response.content_type = iResponse.getStatedMimeType()
        doc.response.inferred_content_type = iResponse.getInferredMimeType(
        )
        headers = iResponse.getHeaders()
        dateHeader = None
        for header in headers:
            try:
                doc.add_response_header(header)
                # Remember the value of the Date header for the timestamp below.
                match = reDateHeader.match(header)
                if match:
                    dateHeader = match.group(1)
            except:
                # First line is the status line, not a name:value header.
                doc.response.responseline = header
        cookies = iResponse.getCookies()
        for cookie in cookies:
            expCookie = cookie.getExpiration()
            expiration = None
            if expCookie:
                try:
                    # java.util.Date.time is milliseconds since the epoch.
                    expiration = str(
                        datetime.fromtimestamp(expCookie.time / 1000))
                except:
                    pass
            doc.add_response_cookie(cookie.getName(), cookie.getValue(),
                                    cookie.getDomain(), cookie.getPath(),
                                    expiration)
        bodyOffset = iResponse.getBodyOffset()
        doc.response.body = response[bodyOffset:].tostring().decode(
            "ascii", "replace")
        if timeStampFromResponse:
            if dateHeader:
                try:
                    doc.timestamp = datetime.fromtimestamp(
                        mktime_tz(parsedate_tz(dateHeader)), tz)  # try to use date from response header "Date"
                    self.lastTimestamp = doc.timestamp
                except:
                    doc.timestamp = self.lastTimestamp  # fallback: last stored timestamp. Else: now
    return doc
def genESDoc(self, msg, timeStampFromResponse=False):
    # Generate a DocHTTPRequestResponse ES document from a Burp
    # IHttpRequestResponse message. The request and response parts are each
    # optional. If timeStampFromResponse is True, the document timestamp is
    # taken from the response "Date" header when possible. The document is
    # returned unsaved.
    # NOTE: bare excepts are intentional in this Jython code (they must also
    # catch Java exceptions raised by the Burp API).
    httpService = msg.getHttpService()
    doc = DocHTTPRequestResponse(protocol=httpService.getProtocol(), host=httpService.getHost(), port=httpService.getPort())
    doc.meta.index = self.confESIndex
    request = msg.getRequest()
    response = msg.getResponse()
    if request:
        iRequest = self.helpers.analyzeRequest(msg)
        doc.request.method = iRequest.getMethod()
        doc.request.url = iRequest.getUrl().toString()
        headers = iRequest.getHeaders()
        for header in headers:
            try:
                doc.add_request_header(header)
            except:
                # The first line is the request line, not a name:value header.
                doc.request.requestline = header
        parameters = iRequest.getParameters()
        for parameter in parameters:
            # Translate Burp's parameter-type constant into the WASE name.
            ptype = parameter.getType()
            if ptype == IParameter.PARAM_URL:
                typename = "url"
            elif ptype == IParameter.PARAM_BODY:
                typename = "body"
            elif ptype == IParameter.PARAM_COOKIE:
                typename = "cookie"
            elif ptype == IParameter.PARAM_XML:
                typename = "xml"
            elif ptype == IParameter.PARAM_XML_ATTR:
                typename = "xmlattr"
            elif ptype == IParameter.PARAM_MULTIPART_ATTR:
                typename = "multipartattr"
            elif ptype == IParameter.PARAM_JSON:
                typename = "json"
            else:
                typename = "unknown"
            name = parameter.getName()
            value = parameter.getValue()
            doc.add_request_parameter(typename, name, value)
        # Translate Burp's content-type constant into the WASE name.
        ctype = iRequest.getContentType()
        if ctype == IRequestInfo.CONTENT_TYPE_NONE:
            doc.request.content_type = "none"
        elif ctype == IRequestInfo.CONTENT_TYPE_URL_ENCODED:
            doc.request.content_type = "urlencoded"
        elif ctype == IRequestInfo.CONTENT_TYPE_MULTIPART:
            doc.request.content_type = "multipart"
        elif ctype == IRequestInfo.CONTENT_TYPE_XML:
            doc.request.content_type = "xml"
        elif ctype == IRequestInfo.CONTENT_TYPE_JSON:
            doc.request.content_type = "json"
        elif ctype == IRequestInfo.CONTENT_TYPE_AMF:
            doc.request.content_type = "amf"
        else:
            doc.request.content_type = "unknown"
        bodyOffset = iRequest.getBodyOffset()
        # Lossy ASCII decode of the request body.
        doc.request.body = request[bodyOffset:].tostring().decode("ascii", "replace")
    if response:
        iResponse = self.helpers.analyzeResponse(response)
        doc.response.status = iResponse.getStatusCode()
        doc.response.content_type = iResponse.getStatedMimeType()
        doc.response.inferred_content_type = iResponse.getInferredMimeType()
        headers = iResponse.getHeaders()
        dateHeader = None
        for header in headers:
            try:
                doc.add_response_header(header)
                # Keep the Date header value for the timestamp logic below.
                match = reDateHeader.match(header)
                if match:
                    dateHeader = match.group(1)
            except:
                # The first line is the status line, not a name:value header.
                doc.response.responseline = header
        cookies = iResponse.getCookies()
        for cookie in cookies:
            expCookie = cookie.getExpiration()
            expiration = None
            if expCookie:
                try:
                    # java.util.Date.time is milliseconds since the epoch.
                    expiration = str(datetime.fromtimestamp(expCookie.time / 1000))
                except:
                    pass
            doc.add_response_cookie(cookie.getName(), cookie.getValue(), cookie.getDomain(), cookie.getPath(), expiration)
        bodyOffset = iResponse.getBodyOffset()
        # Lossy ASCII decode of the response body.
        doc.response.body = response[bodyOffset:].tostring().decode("ascii", "replace")
        if timeStampFromResponse:
            if dateHeader:
                try:
                    doc.timestamp = datetime.fromtimestamp(mktime_tz(parsedate_tz(dateHeader)), tz)  # try to use date from response header "Date"
                    self.lastTimestamp = doc.timestamp
                except:
                    doc.timestamp = self.lastTimestamp  # fallback: last stored timestamp. Else: now
    return doc
class WASEProxyHandler(ProxyHandler):
    """Intercepts HTTP(S) requests/responses, extracts data and feeds ElasticSearch"""

    def mitm_request(self, data):
        # Build self.doc (a DocHTTPRequestResponse) from the intercepted
        # request; the matching mitm_response() call completes and saves it.
        # Returns data unmodified, as the proxy framework expects.

        # Initialize ES connection and index.
        # NOTE(review): this setup runs for every request - presumably so each
        # handler process/thread has a connection; confirm before hoisting.
        res = connections.create_connection(hosts=[args.elasticsearch])
        idx = Index(args.index)
        idx.doc_type(DocHTTPRequestResponse)
        try:
            DocHTTPRequestResponse.init()
            idx.create()
        except:
            # Best effort: index creation fails when the index already exists.
            pass
        r = HTTPRequest(data)
        # determine url; a CONNECT tunnel implies HTTPS
        if self.is_connect:
            scheme = "https"
        else:
            scheme = "http"
        url = scheme + "://" + self.hostname
        # Append the port only when it differs from the scheme default
        # (relies on "and" binding tighter than "or").
        if scheme == "http" and int(self.port) != 80 or scheme == "https" and int(self.port) != 443:
            url += ":" + str(self.port)
        url += self.path
        if args.verbose:
            print(url)
        self.doc = DocHTTPRequestResponse(host=self.hostname, port=int(self.port), protocol=scheme)
        self.doc.meta.index = args.index
        self.doc.request.url = url
        self.doc.request.requestline = r.requestline
        self.doc.request.method = r.command
        self.doc.host = self.hostname
        self.doc.port = int(self.port)
        self.doc.protocol = scheme
        return data

    def mitm_response(self, data):
        # Parse the raw response, fill in the response part of self.doc and
        # save the document. Returns data unmodified.
        # NOTE(review): header names/values and lines[0] are decoded below,
        # implying data is bytes, yet split() uses a str separator - this
        # looks Python-2 specific; confirm the target interpreter.
        lines = data.split("\r\n")
        r = HTTPResponse(FakeSocket(data))
        r.begin()
        # response line
        self.doc.response.status = r.status
        self.doc.response.responseline = lines[0].decode(args.charset, args.encodingerrors)
        # headers: collect content type and Set-Cookie values while indexing
        ct = ""
        cookies = list()
        for header in r.getheaders():
            name = header[0].decode(args.charset, args.encodingerrors)
            value = header[1].decode(args.charset, args.encodingerrors)
            self.doc.add_parsed_response_header(name, value)
            if name == "content-type":
                ct = value
            elif name == "set-cookie":
                cookies.append(value)
        # content type: reContentType extracts the bare type from the header
        try:
            m = reContentType.search(ct)
            self.doc.response.content_type = m.group(1)
        except:
            # missing or unparseable Content-Type - leave the field unset
            pass
        # cookies
        for cookie in cookies:
            # TODO: the following code extracts only partial cookie data - check/rewrite
            try:
                pc = SimpleCookie(cookie)
                for name in pc.keys():
                    c = pc[name]
                    # Morsel attributes may be absent - default each to None.
                    try:
                        value = c.value
                    except AttributeError:
                        value = None
                    try:
                        domain = c.domain
                    except AttributeError:
                        domain = None
                    try:
                        path = c.path
                    except AttributeError:
                        path = None
                    try:
                        exp = c.expires
                    except AttributeError:
                        exp = None
                    self.doc.add_response_cookie(name, value, domain, path, exp)
            except:
                pass
        # body
        bodybytes = r.read()
        self.doc.response.body = bodybytes.decode(args.charset, args.encodingerrors)
        self.doc.save(storeResponseBody)
        return data
from doc_HttpRequestResponse import DocHTTPRequestResponse from elasticsearch_dsl.connections import connections from elasticsearch_dsl import Index from datetime import datetime connections.create_connection(hosts=["localhost"]) idx = Index("test") idx.doc_type(DocHTTPRequestResponse) #idx.create() DocHTTPRequestResponse.init() d = DocHTTPRequestResponse( protocol="http", host="foobar.com", port=80 ) d.add_request_header("User-Agent: foobar") d.add_request_parameter("url", "id", "123") d.add_request_parameter("url", "doc", "234") d.add_response_header("X-Content-Type-Options: nosniff") d.add_response_header("X-Frame-Options: DENY") d.add_response_header("X-XSS-Protection: 1; mode=block") d.add_response_cookie("SESSIONID", "foobar1234") d.add_response_cookie("foo", "bar", "foobar.com", "/foo", datetime.now()) d.response.body = "This is a test!" d.request.method = "GET" d.save() d = DocHTTPRequestResponse(
# Smoke test: write two sample DocHTTPRequestResponse documents into a local
# "test" index.
from doc_HttpRequestResponse import DocHTTPRequestResponse
from elasticsearch_dsl.connections import connections
from elasticsearch_dsl import Index
from datetime import datetime

connections.create_connection(hosts=["localhost"])
test_index = Index("test")
test_index.doc_type(DocHTTPRequestResponse)
#idx.create()
DocHTTPRequestResponse.init()

# First document: request with two URL parameters, three security headers
# and two cookies.
doc = DocHTTPRequestResponse(protocol="http", host="foobar.com", port=80)
doc.add_request_header("User-Agent: foobar")
for ptype, pname, pvalue in (("url", "id", "123"), ("url", "doc", "234")):
    doc.add_request_parameter(ptype, pname, pvalue)
for response_header in (
        "X-Content-Type-Options: nosniff",
        "X-Frame-Options: DENY",
        "X-XSS-Protection: 1; mode=block"):
    doc.add_response_header(response_header)
doc.add_response_cookie("SESSIONID", "foobar1234")
doc.add_response_cookie("foo", "bar", "foobar.com", "/foo", datetime.now())
doc.response.body = "This is a test!"
doc.request.method = "GET"
doc.save()

# Second document: same host, different parameter value and header.
d = DocHTTPRequestResponse(protocol="http", host="foobar.com", port=80)
d.add_request_header("User-Agent: foobar")
d.add_request_parameter("url", "id", "123")
d.add_request_parameter("url", "doc", "456")
d.add_response_header("X-Frame-Options: SAMEORIGIN")