async def handle_request(self, request, payload):
    """Serve one HTTP request: record it as a TANNER event, then answer
    with whatever content/status TANNER's detection dictates.

    :param request: incoming aiohttp request message
    :param payload: request body stream (read only for POST requests)
    """
    print('Request path: {0}'.format(request.path))
    data = self.create_data(request, 200)

    # POST bodies are decoded as urlencoded form data and attached to the event.
    if request.method == 'POST':
        raw_body = await payload.read()
        form = MultiDict(parse_qsl(raw_body.decode('utf-8')))
        print('POST data:')
        for field, value in form.items():
            print('\t- {0}: {1}'.format(field, value))
        data['post_data'] = dict(form)

    # Submit the event to the TANNER service
    event_result = await self.submit_data(data)

    # Log the event to slurp service if enabled
    if self.run_args.slurp_enabled:
        await self.submit_slurp(request.path)

    # NOTE(review): event_result is dereferenced here unconditionally, yet it
    # is checked against None further down — if submit_data can return None
    # this line raises TypeError; confirm against submit_data's contract.
    detection = event_result['response']['message']['detection']
    content, content_type, headers, status_code = await self.parse_tanner_response(
        request.path, detection)

    response = aiohttp.Response(
        self.writer, status=status_code, http_version=request.version)
    for header_name, header_value in headers.items():
        response.add_header(header_name, header_value)
    response.add_header('Server', self.run_args.server_header)

    # Re-issue the session cookie when the client has none, a blank one,
    # or one that no longer matches the current TANNER session.
    previous_sess_uuid = None
    if 'cookies' in data and 'sess_uuid' in data['cookies']:
        previous_sess_uuid = data['cookies']['sess_uuid']
    if event_result is not None and 'sess_uuid' in event_result['response']['message']:
        cur_sess_id = event_result['response']['message']['sess_uuid']
        missing_or_blank = previous_sess_uuid is None or not previous_sess_uuid.strip()
        if missing_or_blank or previous_sess_uuid != cur_sess_id:
            response.add_header('Set-Cookie', 'sess_uuid=' + cur_sess_id)

    response.add_header('Content-Type', content_type if content_type else 'text/plain')
    if content:
        response.add_header('Content-Length', str(len(content)))
    response.send_headers()
    if content:
        response.write(content)
    await response.write_eof()
def handle_request(self, request, payload):
    """Serve one HTTP request (pre-3.5 ``yield from`` coroutine).

    The request is logged, forwarded to TANNER as an event, and answered
    either with TANNER-supplied payload content or with a file from the
    cloned page tree under ``/opt/snare/pages/<page_dir>``.

    :param request: incoming aiohttp request message
    :param payload: request body stream (read only for POST requests)
    """
    print('Request path: {0}'.format(request.path))
    data = self.create_data(request, 200)
    if request.method == 'POST':
        post_data = yield from payload.read()
        post_data = MultiDict(parse_qsl(post_data.decode('utf-8')))
        print('POST data:')
        for key, val in post_data.items():
            print('\t- {0}: {1}'.format(key, val))
        data['post_data'] = dict(post_data)

    # Submit the event to the TANNER service
    event_result = yield from self.submit_data(data)

    # Log the event to slurp service if enabled
    if self.run_args.slurp_enabled:
        yield from self.submit_slurp(request.path)

    response = aiohttp.Response(self.writer, status=200,
                                http_version=request.version)
    # Cloned PHP pages are stored as static files; serve them as HTML.
    mimetypes.add_type('text/html', '.php')
    base_path = '/'.join(['/opt/snare/pages', self.run_args.page_dir])

    if 'payload' in event_result['response']['message']['detection']:
        payload_content = event_result['response']['message']['detection']['payload']
        # Fix: isinstance() instead of type() == dict.
        if isinstance(payload_content, dict):
            # TANNER asked for a stored page with an injected HTML snippet.
            content_type = mimetypes.guess_type(payload_content['page'])[0]
            content = '<html><body></body></html>'
            # Fix: strip the leading slash and use os.path.join so the lookup
            # works whether TANNER sends '/index.html' or 'index.html'
            # (plain string concatenation broke the latter form).
            page_name = payload_content['page']
            if page_name.startswith('/'):
                page_name = page_name[1:]
            page_path = os.path.join(base_path, page_name)
            if os.path.exists(page_path):
                with open(page_path) as p:
                    content = p.read()
            soup = BeautifulSoup(content, 'html.parser')
            script_tag = soup.new_tag('div')
            script_tag.append(BeautifulSoup(payload_content['value'], 'html.parser'))
            soup.body.append(script_tag)
            content = str(soup).encode()
        else:
            # A plain payload string is served verbatim.
            content_type = mimetypes.guess_type(payload_content)[0]
            content = payload_content.encode('utf-8')
    else:
        # No payload: serve the matching file from the cloned page tree.
        query = None
        if request.path == '/':
            parsed_url = self.run_args.index_page
        else:
            parsed_url = urlparse(unquote(request.path))
            if parsed_url.query:
                query = '?' + parsed_url.query
            parsed_url = parsed_url.path
        if parsed_url.startswith('/'):
            parsed_url = parsed_url[1:]
        path = '/'.join([base_path, parsed_url])
        content_type = mimetypes.guess_type(path)[0]
        if content_type is None and '.php' in path:
            content_type = 'text/html'
        # NOTE(review): the query string becomes part of the file name here,
        # presumably matching how the cloner stores dynamic pages — confirm;
        # otherwise any URL with a query string will 404.
        if query is not None:
            path = os.path.normpath(path + query)
        else:
            path = os.path.normpath(path)
        # The startswith() check keeps '..' traversal inside the page tree.
        if os.path.isfile(path) and path.startswith(base_path):
            with open(path, 'rb') as fh:
                content = fh.read()
            if content_type:
                if 'text/html' in content_type:
                    content = yield from self.handle_html_content(content)
        else:
            content_type = None
            content = None
            response = aiohttp.Response(self.writer, status=404,
                                        http_version=request.version)

    if not content_type:
        response.add_header('Content-Type', 'text/plain')
    else:
        response.add_header('Content-Type', content_type)
    if content:
        response.add_header('Content-Length', str(len(content)))
    response.send_headers()
    if content:
        response.write(content)
    yield from response.write_eof()
def handle_request(self, request, payload):
    """Serve one HTTP request (pre-3.5 ``yield from`` coroutine).

    Logs the request, forwards it to TANNER as an event, and answers either
    with TANNER-supplied payload content or with a file from the cloned
    page tree under ``/opt/snare/pages/<page_dir>``.

    :param request: incoming aiohttp request message
    :param payload: request body stream (read only for POST requests)
    """
    print('Request path: {0}'.format(request.path))
    data = self.create_data(request, 200)
    if request.method == 'POST':
        post_data = yield from payload.read()
        post_data = MultiDict(parse_qsl(post_data.decode('utf-8')))
        print('POST data:')
        for key, val in post_data.items():
            print('\t- {0}: {1}'.format(key, val))
        data['post_data'] = dict(post_data)

    # Submit the event to the TANNER service
    event_result = yield from self.submit_data(data)

    # Log the event to slurp service if enabled
    if self.run_args.slurp_enabled:
        yield from self.submit_slurp(request.path)

    response = aiohttp.Response(
        self.writer, status=200, http_version=request.version
    )
    base_path = '/'.join(['/opt/snare/pages', self.run_args.page_dir])

    if 'payload' in event_result['response']['message']['detection']:
        payload_content = event_result['response']['message']['detection']['payload']
        # Fix: isinstance() instead of type() == dict.
        if isinstance(payload_content, dict):
            # TANNER asked for a stored page with an injected HTML snippet.
            content_type = mimetypes.guess_type(payload_content['page'])[0]
            content = '<html><body></body></html>'
            # Fix: strip the leading slash and use os.path.join so the lookup
            # works whether TANNER sends '/index.html' or 'index.html'
            # (plain string concatenation broke the latter form).
            page_name = payload_content['page']
            if page_name.startswith('/'):
                page_name = page_name[1:]
            page_path = os.path.join(base_path, page_name)
            if os.path.exists(page_path):
                with open(page_path) as p:
                    content = p.read()
            soup = BeautifulSoup(content, 'html.parser')
            script_tag = soup.new_tag('div')
            script_tag.append(BeautifulSoup(payload_content['value'], 'html.parser'))
            soup.body.append(script_tag)
            content = str(soup).encode()
        else:
            # A plain payload string is served verbatim.
            content_type = mimetypes.guess_type(payload_content)[0]
            content = payload_content.encode('utf-8')
    else:
        # No payload: serve the matching file from the cloned page tree.
        if request.path == '/':
            parsed_url = self.run_args.index_page
        else:
            parsed_url = urlparse(unquote(request.path)).path
        if parsed_url.startswith('/'):
            parsed_url = parsed_url[1:]
        path = os.path.normpath('/'.join([base_path, parsed_url]))
        # The startswith() check keeps '..' traversal inside the page tree.
        if os.path.isfile(path) and path.startswith(base_path):
            with open(path, 'rb') as fh:
                content = fh.read()
            content_type = mimetypes.guess_type(path)[0]
            if content_type:
                if 'text/html' in content_type:
                    content = yield from self.handle_html_content(content)
        else:
            content_type = None
            content = None
            response = aiohttp.Response(
                self.writer, status=404, http_version=request.version
            )

    if not content_type:
        response.add_header('Content-Type', 'text/plain')
    else:
        response.add_header('Content-Type', content_type)
    if content:
        response.add_header('Content-Length', str(len(content)))
    response.send_headers()
    if content:
        response.write(content)
    yield from response.write_eof()
def handle_request(self, request, payload):
    """Serve one HTTP request (pre-3.5 ``yield from`` coroutine).

    Logs the request, forwards it to TANNER as an event, answers with
    TANNER-supplied payload content or a file from the cloned page tree,
    manages the ``sess_uuid`` cookie, and optionally emits an access-log
    record via ``self.logger``.

    :param request: incoming aiohttp request message
    :param payload: request body stream (read only for POST requests)
    """
    print('Request path: {0}'.format(request.path))
    data = self.create_data(request, 200)
    if request.method == 'POST':
        post_data = yield from payload.read()
        post_data = MultiDict(parse_qsl(post_data.decode('utf-8')))
        print('POST data:')
        for key, val in post_data.items():
            print('\t- {0}: {1}'.format(key, val))
        data['post_data'] = dict(post_data)

    # Submit the event to the TANNER service
    event_result = yield from self.submit_data(data)

    # Log the event to slurp service if enabled
    if self.run_args.slurp_enabled:
        yield from self.submit_slurp(request.path)

    response = aiohttp.Response(self.writer, status=200,
                                http_version=request.version)
    content_type = None
    # Cloned dynamic pages are stored as static files; serve them as HTML.
    mimetypes.add_type('text/html', '.php')
    mimetypes.add_type('text/html', '.aspx')
    base_path = os.path.join('/opt/snare/pages', self.run_args.page_dir)

    if event_result is not None and (
            'payload' in event_result['response']['message']['detection'] and
            event_result['response']['message']['detection']['payload'] is not None):
        payload_content = event_result['response']['message']['detection']['payload']
        # Fix: isinstance() instead of type() == dict.
        if isinstance(payload_content, dict):
            # TANNER asked for a stored page with an injected HTML snippet;
            # strip a leading slash so os.path.join stays under base_path.
            if payload_content['page'].startswith('/'):
                payload_content['page'] = payload_content['page'][1:]
            page_path = os.path.join(base_path, payload_content['page'])
            content = '<html><body></body></html>'
            if os.path.exists(page_path):
                content_type = mimetypes.guess_type(page_path)[0]
                with open(page_path, encoding='utf-8') as p:
                    content = p.read()
            soup = BeautifulSoup(content, 'html.parser')
            script_tag = soup.new_tag('div')
            script_tag.append(BeautifulSoup(payload_content['value'], 'html.parser'))
            soup.body.append(script_tag)
            content = str(soup).encode()
        else:
            # A plain payload string is served verbatim.
            content_type = mimetypes.guess_type(payload_content)[0]
            content = payload_content.encode('utf-8')
    else:
        # No payload: serve the matching file from the cloned page tree.
        # NOTE(review): query is parsed but never used below — dead code?
        query = None
        if request.path == '/':
            parsed_url = self.run_args.index_page
        else:
            parsed_url = urlparse(unquote(request.path))
            if parsed_url.query:
                query = '?' + parsed_url.query
            parsed_url = parsed_url.path
        if parsed_url.startswith('/'):
            parsed_url = parsed_url[1:]
        path = os.path.normpath(os.path.join(base_path, parsed_url))
        # The startswith() check keeps '..' traversal inside the page tree.
        if os.path.isfile(path) and path.startswith(base_path):
            content_type = mimetypes.guess_type(path)[0]
            with open(path, 'rb') as fh:
                content = fh.read()
            if content_type:
                if 'text/html' in content_type:
                    content = yield from self.handle_html_content(content)
        else:
            content_type = None
            content = None
            response = aiohttp.Response(self.writer, status=404,
                                        http_version=request.version)

    response.add_header('Server', self.run_args.server_header)

    # Re-issue the session cookie when the client has none, a blank one,
    # or one that no longer matches the current TANNER session.
    if 'cookies' in data and 'sess_uuid' in data['cookies']:
        previous_sess_uuid = data['cookies']['sess_uuid']
    else:
        previous_sess_uuid = None
    if event_result is not None and (
            'sess_uuid' in event_result['response']['message']):
        cur_sess_id = event_result['response']['message']['sess_uuid']
        if previous_sess_uuid is None or not previous_sess_uuid.strip(
        ) or previous_sess_uuid != cur_sess_id:
            response.add_header('Set-Cookie', 'sess_uuid=' + cur_sess_id)

    if not content_type:
        response.add_header('Content-Type', 'text/plain')
    else:
        response.add_header('Content-Type', content_type)
    if content:
        response.add_header('Content-Length', str(len(content)))

    # logging of ip, stat, req, user, len, time
    if args.logger:
        req = request.method + ' /' + self.run_args.page_dir + request.path
        major = response.version.major
        minor = response.version.minor
        ver = " HTTP/" + str(major) + '.' + str(minor)
        req = req + ver
        user = getpass.getuser()
        tim = strftime("%d/%b/%Y:%H:%M:%S %z")
        d = {
            # Fix: use .get() — a client without a Host header (HTTP/1.0) or a
            # body-less response (every 404 sets no Content-Length) previously
            # raised KeyError here and killed the handler mid-response.
            'hostIP': request.headers.get('Host', '-'),
            'stat': response.status,
            'req': req,
            'user': user,
            'content_length': response.headers.get('Content-Length', '0'),
            'time': tim
        }
        level_dict = {
            'CRITICAL': 50,
            'ERROR': 40,
            'WARNING': 30,
            'INFO': 20,
            'DEBUG': 10,
            'NOTSET': 0
        }
        self.logger.log(level_dict[args.logger], ' ', extra=d)

    response.send_headers()
    if content:
        response.write(content)
    yield from response.write_eof()