def start(self):
    """Bootstrap the TANNER server: connect to Redis, then serve the aiohttp app."""
    event_loop = asyncio.get_event_loop()
    self.redis_client = event_loop.run_until_complete(
        redis_client.RedisClient.get_redis_client()
    )
    app = self.create_app(event_loop)
    web.run_app(
        app,
        host=TannerConfig.get('TANNER', 'host'),
        port=int(TannerConfig.get('TANNER', 'port')),
    )
async def handle_event(self, request):
    """Parse an incoming SNARE event, run attack detection, persist it and respond."""
    data = await request.read()
    try:
        data = json.loads(data.decode('utf-8'))
        # NOTE(review): yarl.unquote only exists in old yarl releases (removed
        # upstream) — confirm the pinned yarl version still provides it.
        path = yarl.unquote(data['path'])
    except (TypeError, ValueError, KeyError) as error:
        self.logger.error('error parsing request: %s', data)
        # Report the exception class name back to the caller.
        response_msg = self._make_response(msg=type(error).__name__)
    else:
        session = await self.session_manager.add_or_update_session(
            data, self.redis_client)
        self.logger.info('Requested path %s', path)
        await self.dorks.extract_path(path, self.redis_client)
        detection = await self.base_handler.handle(data, session)
        session.set_attack_type(path, detection["name"])
        response_msg = self._make_response(
            msg=dict(detection=detection, sess_uuid=session.get_uuid()))
        self.logger.info('TANNER response %s', response_msg)
        # Attach the response to the event payload before persisting it.
        session_data = data
        session_data['response_msg'] = response_msg
        # Log to Mongo
        if TannerConfig.get('MONGO', 'enabled') == 'True':
            db = mongo_report()
            session_id = db.create_session(session_data)
            self.logger.info(
                "Writing session to DB: {}".format(session_id))
        if TannerConfig.get('LOCALLOG', 'enabled') == 'True':
            lr = local_report()
            lr.create_session(session_data)
    return web.json_response(response_msg)
async def get_injection_result(self, payload):
    """Run a template-injection payload inside the sandbox image.

    :param payload: attacker-supplied template snippet
    :return: dict(value=<execution output or None>, page=True)
    """
    execute_result = None
    # Build the custom image
    await self.docker_helper.setup_host_image(
        remote_path=self.remote_path, tag='template_injection:latest')
    if patterns.TEMPLATE_INJECTION_TORNADO.match(payload):
        execute_result = await self._render_template('tornado', payload)
        # Removing string "b''" from results
        # NOTE(review): only the tornado branch strips the bytes repr;
        # confirm the mako output intentionally keeps it.
        if execute_result:
            execute_result = execute_result[2:-2]
    elif patterns.TEMPLATE_INJECTION_MAKO.match(payload):
        execute_result = await self._render_template('mako', payload)
    result = dict(value=execute_result, page=True)
    return result

async def _render_template(self, engine, payload):
    """Load the template source for *engine* from config, inject the payload and execute it in docker."""
    work_dir = TannerConfig.get('DATA', engine)
    with open(work_dir, 'r') as f:
        template_source = f.read().format(payload)
    cmd = ["python3", "-c", template_source]
    return await self.docker_helper.execute_cmd(cmd, 'template_injection:latest')
def __init__(self):
    """Read the ALPINE ssh credentials from config and set up logging."""
    self.logger = logging.getLogger('tanner.aiodocker_helper.AIODockerHelper')
    self.host, self.user, self.password = (
        TannerConfig.get('ALPINE', option)
        for option in ('hostname', 'username', 'password')
    )
def start(self):
    """Start the API server backed by a pooled Redis connection."""
    loop = asyncio.get_event_loop()
    self.redis_client = loop.run_until_complete(
        redis_client.RedisClient.get_redis_client(poolsize=20)
    )
    self.api = api.Api(self.redis_client)
    application = self.create_app(loop)
    web.run_app(
        application,
        host=TannerConfig.get('API', 'host'),
        port=int(TannerConfig.get('API', 'port')),
    )
def start(self):
    """Start the web UI server backed by a pooled Redis connection."""
    loop = asyncio.get_event_loop()
    pool = loop.run_until_complete(
        redis_client.RedisClient.get_redis_client(poolsize=20)
    )
    self.redis_client = pool
    self.api = api.Api(pool)
    web.run_app(
        self.create_app(loop),
        host=TannerConfig.get('WEB', 'host'),
        port=int(TannerConfig.get('WEB', 'port')),
    )
def start(self):
    """Connect to Redis and launch the TANNER aiohttp application."""
    loop = asyncio.get_event_loop()
    self.redis_client = loop.run_until_complete(
        redis_client.RedisClient.get_redis_client())
    host = TannerConfig.get("TANNER", "host")
    # Config values are strings; aiohttp expects an integer port
    # (the sibling start() implementations already coerce with int()).
    port = int(TannerConfig.get("TANNER", "port"))
    web.run_app(self.make_app(), host=host, port=port)
def __init__(self):
    """Wire up session tracking, dorks handling and the base emulator handler."""
    self.session_manager = session_manager.SessionManager()
    self.dorks = dorks_manager.DorksManager()
    self.base_handler = base.BaseHandler(
        TannerConfig.get('EMULATORS', 'root_dir'),
        TannerConfig.get('SQLI', 'db_name'),
    )
    self.logger = logging.getLogger(__name__)
    # Populated once the event loop is running.
    self.redis_client = None
def start(self):
    """Run the web UI: Redis pool, API facade, then the aiohttp server."""
    loop = asyncio.get_event_loop()
    self.redis_client = loop.run_until_complete(
        redis_client.RedisClient.get_redis_client(poolsize=20)
    )
    self.api = api.Api(self.redis_client)
    web.run_app(
        self.make_app(),
        host=TannerConfig.get("WEB", "host"),
        port=int(TannerConfig.get("WEB", "port")),
    )
def __init__(self, base_dir, db_name, loop=None):
    """Instantiate each config-enabled emulator and record which ones handle GET/POST/cookie data."""
    factories = {
        'rfi': lambda: rfi.RfiEmulator(base_dir, loop),
        'lfi': lambda: lfi.LfiEmulator(),
        'xss': lambda: xss.XssEmulator(),
        'sqli': lambda: sqli.SqliEmulator(db_name, base_dir),
        'cmd_exec': lambda: cmd_exec.CmdExecEmulator(),
    }
    # Disabled emulators are kept in the map as None.
    self.emulators = {
        name: (make() if TannerConfig.get('EMULATOR_ENABLED', name) else None)
        for name, make in factories.items()
    }
    self.get_emulators = ['sqli', 'rfi', 'lfi', 'xss', 'cmd_exec']
    self.post_emulators = ['sqli', 'rfi', 'lfi', 'xss', 'cmd_exec']
    self.cookie_emulators = ['sqli']
def start(self):
    """Start TANNER with the background dork-deletion tasks registered."""
    loop = asyncio.get_event_loop()
    self.redis_client = loop.run_until_complete(
        redis_client.RedisClient.get_redis_client()
    )
    app = self.create_app(loop)
    # Periodic cleanup runs for the lifetime of the application.
    app.on_startup.append(self.start_background_delete)
    app.on_cleanup.append(self.cleanup_background_tasks)
    web.run_app(
        app,
        host=TannerConfig.get('TANNER', 'host'),
        port=int(TannerConfig.get('TANNER', 'port')),
    )
async def connect_to_db(self):
    """
    Creates an aiomysql connection
    :return: connection object
    """
    credentials = {
        option: TannerConfig.get('SQLI', option)
        for option in ('host', 'user', 'password')
    }
    return await aiomysql.connect(**credentials)
async def connect_to_db(self):
    """Open and return a new aiomysql connection using the SQLI config section."""
    host = TannerConfig.get("SQLI", "host")
    user = TannerConfig.get("SQLI", "user")
    password = TannerConfig.get("SQLI", "password")
    return await aiomysql.connect(host=host, user=user, password=password)
async def get_redis_client(poolsize=None):
    """Create an aioredis connection pool from REDIS config; exit the process on timeout."""
    client = None
    try:
        host = TannerConfig.get('REDIS', 'host')
        port = TannerConfig.get('REDIS', 'port')
        if poolsize is None:
            # Fall back to the configured pool size.
            poolsize = TannerConfig.get('REDIS', 'poolsize')
        timeout = TannerConfig.get('REDIS', 'timeout')
        client = await asyncio.wait_for(
            aioredis.create_redis_pool((host, int(port)), maxsize=int(poolsize)),
            timeout=int(timeout),
        )
    except asyncio.TimeoutError as timeout_error:
        LOGGER.exception(
            'Problem with redis connection. Please, check your redis server. %s',
            timeout_error)
        # Redis is mandatory; there is nothing useful to do without it.
        exit()
    return client
def start(self):
    """Launch the API server; print a full-access key when auth is enabled."""
    loop = asyncio.get_event_loop()
    self.redis_client = loop.run_until_complete(
        redis_client.RedisClient.get_redis_client(poolsize=20)
    )
    self.api = api.Api(self.redis_client)
    set_auth = TannerConfig.get('API', 'auth')
    app = self.create_app(loop, set_auth)
    host = TannerConfig.get('API', 'host')
    port = int(TannerConfig.get('API', 'port'))
    if set_auth:
        # Operators need this token to reach the protected endpoints.
        print('API_KEY for full access:', generate())
    web.run_app(app, host=host, port=port)
def start(self):
    """Start the API app, optionally guarded by a freshly generated API key."""
    loop = asyncio.get_event_loop()
    self.redis_client = loop.run_until_complete(
        redis_client.RedisClient.get_redis_client(poolsize=20)
    )
    self.api = api.Api(self.redis_client)
    set_auth = TannerConfig.get("API", "auth")
    host = TannerConfig.get("API", "host")
    port = int(TannerConfig.get("API", "port"))
    if set_auth:
        # Print the token operators must present for full access.
        print("API_KEY for full access:", generate())
    web.run_app(self.make_app(auth=set_auth), host=host, port=port)
def __init__(self):
    """Connect to the local docker daemon and remember the base host image."""
    self.logger = logging.getLogger('tanner.docker_helper.DockerHelper')
    self.host_image = TannerConfig.get('DOCKER', 'host_image')
    try:
        self.docker_client = docker.from_env(version='auto')
    except docker.errors.APIError as docker_error:
        # Keep running; helper methods will fail later if docker is down.
        self.logger.exception('Error while connecting to docker service %s', docker_error)
def create_logger(debug_filename, err_filename, logger_name):
    """Build a logger that writes ERROR records to *err_filename* and everything below to *debug_filename*."""
    if TannerConfig.get('CLEANLOG', 'enabled') == 'True':
        # Truncate the error log left over from the previous run.
        open(err_filename, 'w').close()

    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.DEBUG)
    logger.propagate = False
    formatter = logging.Formatter(
        fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')

    # One rotating file per severity band: errors first, then debug.
    for filename, level in ((err_filename, logging.ERROR),
                            (debug_filename, logging.DEBUG)):
        handler = logging.handlers.RotatingFileHandler(filename, encoding='utf-8')
        handler.setLevel(level)
        handler.setFormatter(formatter)
        if level == logging.DEBUG:
            # Keep ERROR and above out of the debug file.
            handler.addFilter(LevelFilter(logging.ERROR))
        logger.addHandler(handler)
    return logger
def __init__(self):
    """Set up session handling, dorks, the base handler, and optional hpfeeds reporting."""
    self.session_manager = session_manager.SessionManager()
    self.dorks = dorks_manager.DorksManager()
    self.base_handler = base.BaseHandler(
        TannerConfig.get('EMULATORS', 'root_dir'),
        TannerConfig.get('SQLI', 'db_name'),
    )
    self.logger = logging.getLogger(__name__)
    self.redis_client = None
    if TannerConfig.get('HPFEEDS', 'enabled') is True:
        self.hpf = hpfeeds_report()
        self.hpf.connect()
        # A failed connection is not fatal — just warn the operator.
        if self.hpf.connected() is False:
            self.logger.warning('hpfeeds not connected - no hpfeeds messages will be created')
async def choose_possible_owner(self, stats):
    """Estimate confidences for who generated the session (user/tool/crawler/attacker/admin)."""
    possible_owners = dict.fromkeys(
        ['user', 'tool', 'crawler', 'attacker', 'admin'], 0.0)
    if stats['peer_ip'] in ('127.0.0.1', '::1'):
        # Loopback traffic can only come from the operator.
        possible_owners['admin'] = 1.0
    with open(TannerConfig.get('DATA', 'crawler_stats')) as f:
        bots_owner = await self._loop.run_in_executor(None, f.read)
    crawler_hosts = ['googlebot.com', 'baiduspider', 'search.msn.com',
                     'spider.yandex.com', 'crawl.sogou.com']
    crawler_cf, tool_cf = await self.detect_crawler(stats, bots_owner, crawler_hosts)
    possible_owners['crawler'] = crawler_cf
    possible_owners['tool'] = tool_cf
    possible_owners['attacker'] = await self.detect_attacker(
        stats, bots_owner, crawler_hosts)
    # Whatever confidence is left over is attributed to a plain user.
    highest = max(crawler_cf, possible_owners['attacker'], tool_cf)
    possible_owners['user'] = round(1 - highest, 2)
    owners = {name: cf for name, cf in possible_owners.items() if cf != 0}
    return {'possible_owners': owners}
async def choose_possible_owner(self, stats):
    """Classify the likely session owner, assigning a confidence per category."""
    possible_owners = {name: 0.0 for name in ("user", "tool", "crawler", "attacker", "admin")}
    if stats["peer_ip"] == "127.0.0.1" or stats["peer_ip"] == "::1":
        # Loopback requests are assumed to be the operator.
        possible_owners["admin"] = 1.0
    with open(TannerConfig.get("DATA", "crawler_stats")) as stats_file:
        bots_owner = await self._loop.run_in_executor(None, stats_file.read)
    known_crawlers = [
        "googlebot.com",
        "baiduspider",
        "search.msn.com",
        "spider.yandex.com",
        "crawl.sogou.com",
    ]
    (possible_owners["crawler"],
     possible_owners["tool"]) = await self.detect_crawler(stats, bots_owner, known_crawlers)
    possible_owners["attacker"] = await self.detect_attacker(stats, bots_owner, known_crawlers)
    max_confidence = max(possible_owners["crawler"],
                         possible_owners["attacker"],
                         possible_owners["tool"])
    # Remaining confidence is attributed to a normal user.
    possible_owners["user"] = round(1 - max_confidence, 2)
    nonzero = {k: v for k, v in possible_owners.items() if v != 0}
    return {"possible_owners": nonzero}
def __init__(self):
    """Create an aiodocker client and record the configured host image tag."""
    self.logger = logging.getLogger("tanner.aiodocker_helper.AIODockerHelper")
    self.host_image = TannerConfig.get("DOCKER", "host_image")
    self.docker_client = aiodocker.Docker()
def __init__(self, db_name, working_dir):
    """Pick the SQLi backend (MySQL or SQLite) based on configuration."""
    use_mysql = TannerConfig.get('SQLI', 'type') == 'MySQL'
    self.sqli_emulator = (
        mysqli.MySQLIEmulator(db_name)
        if use_mysql
        else sqlite.SQLITEEmulator(db_name, working_dir)
    )
    # Lazily populated by later calls.
    self.query_map = None
def create_logger(debug_filename, err_filename, logger_name):
    """Configure *logger_name* with separate rotating files for errors and debug output."""
    if TannerConfig.get("CLEANLOG", "enabled") == "True":
        # Start with an empty error log.
        with open(err_filename, "w"):
            pass

    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.DEBUG)
    logger.propagate = False
    formatter = logging.Formatter(
        fmt="%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    # ERROR and above go to the error file.
    err_handler = logging.handlers.RotatingFileHandler(err_filename, encoding="utf-8")
    err_handler.setLevel(logging.ERROR)
    err_handler.setFormatter(formatter)
    logger.addHandler(err_handler)

    # Everything below ERROR goes to the debug file.
    dbg_handler = logging.handlers.RotatingFileHandler(debug_filename, encoding="utf-8")
    dbg_handler.setLevel(logging.DEBUG)
    dbg_handler.setFormatter(formatter)
    dbg_handler.addFilter(LevelFilter(logging.ERROR))
    logger.addHandler(dbg_handler)
    return logger
def create_logger(debug_filename, err_filename, logger_name):
    """Create a logger that splits ERROR records and debug records into separate rotating files.

    :param debug_filename: path of the debug log (records below ERROR)
    :param err_filename: path of the error log (ERROR and above)
    :param logger_name: name passed to logging.getLogger
    :return: configured Logger
    """
    if TannerConfig.get('CLEANLOG', 'enabled') == 'True':
        # Truncate the error log from the previous run.
        with open(err_filename, 'w'):
            pass

    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.DEBUG)
    logger.propagate = False
    # Include seconds in timestamps, consistent with the other
    # create_logger variants in the project.
    formatter = logging.Formatter(
        fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')

    # ERROR log to 'tanner.err'
    error_log_handler = logging.handlers.RotatingFileHandler(
        err_filename, encoding='utf-8')
    error_log_handler.setLevel(logging.ERROR)
    error_log_handler.setFormatter(formatter)
    logger.addHandler(error_log_handler)

    # DEBUG log to 'tanner.log'
    debug_log_handler = logging.handlers.RotatingFileHandler(
        debug_filename, encoding='utf-8')
    debug_log_handler.setLevel(logging.DEBUG)
    debug_log_handler.setFormatter(formatter)
    # Keep ERROR and above out of the debug file.
    max_level_filter = LevelFilter(logging.ERROR)
    debug_log_handler.addFilter(max_level_filter)
    logger.addHandler(debug_log_handler)
    return logger
async def get_emulation_result(self, session, data, target_emulators):
    """
    Return emulation result for the vulnerability of highest order
    :param session (Session object): Current active session
    :param data (MultiDictProxy object): Data to be checked
    :param target_emulators (list): Emulators against which data is to be checked
    :return: A dict object containing name, order and payload to be injected for vulnerability
    """
    detection = dict(name="unknown", order=0)
    attack_params = {}
    for param_id, param_value in data.items():
        for emulator in target_emulators:
            if TannerConfig.get("EMULATOR_ENABLED", emulator):
                # Empty values are skipped; scan() returns None when nothing matches.
                possible_detection = self.emulators[emulator].scan(
                    param_value) if param_value else None
                if possible_detection:
                    # Keep only the highest-order detection seen so far.
                    if detection["order"] < possible_detection["order"]:
                        detection = possible_detection
                    # Record every parameter that triggered this emulator,
                    # even when it is not the current best detection.
                    if emulator not in attack_params:
                        attack_params[emulator] = []
                    attack_params[emulator].append(
                        dict(id=param_id, value=param_value))
    if detection["name"] in self.emulators:
        # Let the winning emulator produce the payload to inject.
        emulation_result = await self.emulators[detection["name"]].handle(
            attack_params[detection["name"]], session)
        if emulation_result:
            detection["payload"] = emulation_result
    return detection
def __init__(self, db_name, working_dir):
    """Choose the SQL-injection backend according to the SQLI type setting."""
    backend = TannerConfig.get('SQLI', 'type')
    if backend == 'MySQL':
        emulator = mysqli.MySQLIEmulator(db_name)
    else:
        emulator = sqlite.SQLITEEmulator(db_name, working_dir)
    self.sqli_emulator = emulator
    self.query_map = None
async def choose_possible_owner(self, stats):
    """Score each owner category for a session, using known attack types as evidence."""
    attack_types = {'sqli', 'rfi', 'lfi', 'xss', 'php_code_injection', 'cmd_exec', 'crlf'}
    possible_owners = dict.fromkeys(('user', 'tool', 'crawler', 'attacker'), 0.0)
    with open(TannerConfig.get('DATA', 'crawler_stats')) as f:
        bots_owner = await self._loop.run_in_executor(None, f.read)
    crawler_hosts = ['googlebot.com', 'baiduspider', 'search.msn.com',
                     'spider.yandex.com', 'crawl.sogou.com']
    possible_owners['crawler'], possible_owners['tool'] = await self.detect_crawler(
        stats, bots_owner, crawler_hosts)
    possible_owners['attacker'] = await self.detect_attacker(
        stats, bots_owner, crawler_hosts, attack_types)
    # Whatever confidence remains is attributed to a normal user.
    top = max(possible_owners['crawler'], possible_owners['attacker'],
              possible_owners['tool'])
    possible_owners['user'] = round(1 - top, 2)
    return {'possible_owners': {k: v for k, v in possible_owners.items() if v != 0}}
def __init__(self):
    """Initialise the async docker client and the configured host image tag."""
    self.docker_client = aiodocker.Docker()
    self.logger = logging.getLogger('tanner.aiodocker_helper.AIODockerHelper')
    self.host_image = TannerConfig.get('DOCKER', 'host_image')
def __init__(self, db_name, working_dir):
    """Instantiate the SQLi emulator backend chosen in configuration."""
    self.logger = logging.getLogger("tanner.sqli_emulator")
    emulator_type = TannerConfig.get("SQLI", "type")
    self.sqli_emulator = (
        mysqli.MySQLIEmulator(db_name)
        if emulator_type == "MySQL"
        else sqlite.SQLITEEmulator(db_name, working_dir)
    )
    self.query_map = None
def __init__(self, db_name, working_dir):
    """Set up logging and pick MySQL- or SQLite-backed SQLi emulation."""
    self.logger = logging.getLogger('tanner.sqli_emulator')
    if TannerConfig.get('SQLI', 'type') != 'MySQL':
        # Anything other than MySQL falls back to the SQLite backend.
        self.sqli_emulator = sqlite.SQLITEEmulator(db_name, working_dir)
    else:
        self.sqli_emulator = mysqli.MySQLIEmulator(db_name)
    self.query_map = None
def read_config(self):
    """Load the DB layout description from the configured JSON file.

    :return: parsed config dict, or None when the file is not valid JSON
    """
    with open(TannerConfig.get('DATA', 'db_config')) as db_config:
        try:
            config = json.load(db_config)
        except json.JSONDecodeError as json_error:
            # A broken config file is an error, not informational noise.
            self.logger.error('Failed to load json: %s', json_error)
            return None
        return config
async def get_redis_client(poolsize=None):
    """Build an asyncio_redis connection pool from REDIS settings; terminate on timeout."""
    redis_pool = None
    host = TannerConfig.get('REDIS', 'host')
    port = TannerConfig.get('REDIS', 'port')
    if poolsize is None:
        poolsize = TannerConfig.get('REDIS', 'poolsize')
    timeout = TannerConfig.get('REDIS', 'timeout')
    try:
        redis_pool = await asyncio.wait_for(
            asyncio_redis.Pool.create(
                host=host, port=int(port), poolsize=int(poolsize)),
            timeout=int(timeout),
        )
    except asyncio.TimeoutError as timeout_error:
        LOGGER.error(
            'Problem with redis connection. Please, check your redis server. %s',
            timeout_error)
        # Redis is required; abort the process.
        exit()
    return redis_pool
def __init__(self):
    """Connect to the docker daemon; log (but survive) connection failures."""
    # The logger must exist before the except branch below can use it —
    # previously it was assigned last, so a docker failure raised
    # AttributeError instead of being logged.
    self.logger = logging.getLogger('tanner.docker_helper.DockerHelper')
    try:
        self.docker_client = docker.from_env(version='auto')
    except docker.errors.APIError as docker_error:
        self.logger.error('Error while connecting to docker service %s', docker_error)
    self.host_image = TannerConfig.get('DOCKER', 'host_image')
async def auth(self, request, handler):
    """aiohttp middleware: require a valid JWT in the 'key' query parameter.

    Returns a 401 body without invoking the wrapped handler when the token
    is missing or fails signature verification.
    """
    auth_key = request.query.get('key')
    try:
        # PyJWT's decode() takes a plural 'algorithms' whitelist; the
        # singular 'algorithm' kwarg is not accepted by PyJWT >= 2 and the
        # call would fail before verifying anything.
        jwt.decode(auth_key, TannerConfig.get('API', 'auth_signature'),
                   algorithms=['HS256'])
    except (DecodeError, InvalidSignatureError):
        return web.Response(body='401: Unauthorized')
    # Only run the protected handler once the token checks out — previously
    # the handler executed (with side effects) before auth was verified.
    return await handler(request)
async def copy_db(self, user_db, attacker_db):
    """Clone *user_db* into *attacker_db* so each attacker works on a disposable copy.

    :param user_db: name of the pristine source database
    :param attacker_db: name of the per-attacker database to create
    :return: the attacker db name (no-op besides logging when it already exists)
    """
    db_exists = await self.check_db_exists(attacker_db)
    if db_exists:
        self.logger.info('Attacker db already exists')
    else:
        # create new attacker db
        conn = await self.connect_to_db()
        cursor = await conn.cursor()
        await cursor.execute('CREATE DATABASE {db_name}'.format(db_name=attacker_db))
        conn.close()
        # copy user db to attacker db
        # NOTE(review): the commands are built by string formatting and run
        # with shell=True — config values containing shell metacharacters
        # would be interpreted by the shell; confirm config is trusted.
        dump_db_cmd = 'mysqldump -h {host} -u {user} -p{password} {db_name}'
        restore_db_cmd = 'mysql -h {host} -u {user} -p{password} {db_name}'
        dump_db_cmd = dump_db_cmd.format(host=TannerConfig.get('SQLI', 'host'),
                                         user=TannerConfig.get('SQLI', 'user'),
                                         password=TannerConfig.get('SQLI', 'password'),
                                         db_name=user_db
                                         )
        restore_db_cmd = restore_db_cmd.format(host=TannerConfig.get('SQLI', 'host'),
                                               user=TannerConfig.get('SQLI', 'user'),
                                               password=TannerConfig.get('SQLI', 'password'),
                                               db_name=attacker_db
                                               )
        try:
            # Pipe mysqldump's stdout straight into mysql (dump | restore).
            dump_db_process = subprocess.Popen(dump_db_cmd, stdout=subprocess.PIPE, shell=True)
            restore_db_process = subprocess.Popen(restore_db_cmd, stdin=dump_db_process.stdout, shell=True)
            # Close our copy of the pipe so the dump process receives SIGPIPE
            # if the restore side exits early.
            dump_db_process.stdout.close()
            dump_db_process.wait()
            restore_db_process.wait()
        except subprocess.CalledProcessError as e:
            self.logger.exception('Error during copying sql database : %s' % e)
    return attacker_db
async def handle_event(self, request):
    """Handle one SNARE event: detect the attack type, log it, and return the TANNER verdict."""
    data = await request.read()
    try:
        data = json.loads(data.decode('utf-8'))
        path = yarl.URL(data['path']).human_repr()
    except (TypeError, ValueError, KeyError) as error:
        self.logger.exception('error parsing request: %s', data)
        # Report the exception class name back to the caller.
        response_msg = self._make_response(msg=type(error).__name__)
    else:
        session = await self.session_manager.add_or_update_session(
            data, self.redis_client
        )
        self.logger.info('Requested path %s', path)
        # Feed the requested path into the dorks collection.
        await self.dorks.extract_path(path, self.redis_client)
        detection = await self.base_handler.handle(data, session)
        session.set_attack_type(path, detection["name"])
        response_msg = self._make_response(msg=dict(detection=detection, sess_uuid=session.get_uuid()))
        self.logger.info('TANNER response %s', response_msg)
        # Attach the response to the event payload before persisting it.
        session_data = data
        session_data['response_msg'] = response_msg
        # Log to Mongo
        if TannerConfig.get('MONGO', 'enabled') is True:
            db = mongo_report()
            session_id = db.create_session(session_data)
            self.logger.info("Writing session to DB: {}".format(session_id))
        # Log to hpfeeds
        if TannerConfig.get('HPFEEDS', 'enabled') is True:
            # self.hpf is only created when HPFEEDS was enabled at startup.
            if self.hpf.connected():
                self.hpf.create_session(session_data)
        if TannerConfig.get('LOCALLOG', 'enabled') is True:
            lr = local_report()
            lr.create_session(session_data)
    return web.json_response(response_msg)
def find_location(ip):
    """Geolocate *ip* with the configured GeoIP2 city database.

    :param ip: IP address string
    :return: dict with country/country_code/city/zip_code, or "NA" when the
             address is not present in the database
    """
    reader = Reader(TannerConfig.get('DATA', 'geo_db'))
    try:
        location = reader.city(ip)
        info = dict(
            country=location.country.name,
            country_code=location.country.iso_code,
            city=location.city.name,
            zip_code=location.postal.code,
        )
    except geoip2.errors.AddressNotFoundError:
        info = "NA"  # When IP doesn't exist in the db, set info as "NA - Not Available"
    finally:
        # Release the file/mmap handle held by the GeoIP2 reader —
        # previously it leaked on every call.
        reader.close()
    return info
async def choose_possible_owner(self, stats):
    """Derive owner-confidence scores for the session described by *stats*."""
    possible_owners = {name: 0.0 for name in ('user', 'tool', 'crawler', 'attacker')}
    attacks = {'sqli', 'rfi', 'lfi', 'xss', 'php_code_injection', 'cmd_exec', 'crlf'}
    with open(TannerConfig.get('DATA', 'crawler_stats')) as handle:
        bots_owner = await self._loop.run_in_executor(None, handle.read)
    crawler_hosts = ['googlebot.com', 'baiduspider', 'search.msn.com',
                     'spider.yandex.com', 'crawl.sogou.com']
    crawler_score, tool_score = await self.detect_crawler(stats, bots_owner, crawler_hosts)
    possible_owners['crawler'] = crawler_score
    possible_owners['tool'] = tool_score
    possible_owners['attacker'] = await self.detect_attacker(
        stats, bots_owner, crawler_hosts, attacks)
    # Leftover confidence goes to the plain-user category.
    best = max(crawler_score, possible_owners['attacker'], tool_score)
    possible_owners['user'] = round(1 - best, 2)
    owners = {k: v for k, v in possible_owners.items() if v != 0}
    return {'possible_owners': owners}
async def connect_to_db(self):
    """Return a fresh aiomysql connection built from the SQLI credentials."""
    get = TannerConfig.get
    return await aiomysql.connect(
        host=get('SQLI', 'host'),
        user=get('SQLI', 'user'),
        password=get('SQLI', 'password'),
    )
async def remove_associated_db(self):
    """Drop the attacker's database via the backend-appropriate helper."""
    if TannerConfig.get('SQLI', 'type') != 'MySQL':
        # SQLite removal is synchronous (plain file delete).
        SQLITEDBHelper().delete_db(self.associated_db)
    else:
        await MySQLDBHelper().delete_db(self.associated_db)