def new_scan():
    _start_scan_config = {}
    language = app.config["OWASP_NETTACKER_CONFIG"]["language"]
    __api_key_check(app, flask_request, language)
    for key in _core_default_config():
        if __get_value(flask_request, key) is not None:
            _start_scan_config[key] = __get_value(flask_request, key)
    _start_scan_config = __rules(
        __remove_non_api_keys(
            _builder(_start_scan_config,
                     _builder(_core_config(), _core_default_config()))),
        _core_default_config(), language)
    scan_id = "".join(random.choice("0123456789abcdef") for x in range(32))
    scan_cmd = messages(language, 158)
    _start_scan_config["scan_id"] = scan_id
    p = multiprocessing.Process(target=__scan,
                                args=[_start_scan_config, scan_id, scan_cmd])
    p.start()
    return jsonify(_start_scan_config)

def check_for_requirements(start_api_server):
    """
    check if requirements exist

    Returns:
        True if they exist, otherwise False
    """
    from config import api_configuration
    from core.alert import messages

    # check external required modules
    api_config = api_configuration()
    connection_timeout = api_config["api_database_connection_timeout"]
    try:
        import pymongo
        import netaddr
        import flask
        del netaddr
        del flask
    except Exception:
        exit_failure("pip install -r requirements.txt")
    # check mongodb connection
    try:
        connection = pymongo.MongoClient(
            api_config["api_database"],
            serverSelectionTimeoutMS=connection_timeout)
        connection.list_database_names()
    except Exception:
        exit_failure("cannot connect to mongodb")
    # if it's the honeypot server (not the API server), check external tools
    if not start_api_server:
        # check docker
        try:
            subprocess.check_output(["docker", "--help"],
                                    stderr=subprocess.PIPE)
        except Exception:
            exit_failure(messages("en", "docker_error"))
        # check tshark
        try:
            subprocess.check_output(["tshark", "--help"],
                                    stderr=subprocess.PIPE)
        except Exception:
            exit_failure("please install tshark first!")
    return True

def send_submit_query(query, language):
    """
    a function to send submit-based queries (such as insert, update or delete)
    to the db; it retries up to 100 times if the query returns an error

    Args:
        query: the query to execute
        language: language

    Returns:
        True if submitted, otherwise False
    """
    conn = create_connection(language)
    if not conn:
        return False
    try:
        for i in range(1, 100):
            try:
                c = conn.cursor()
                c.execute(query)
                conn.commit()
                conn.close()
                break
            except Exception:
                pass
            time.sleep(0.01)
    except Exception:
        warn(messages(language, 168))
        return False
    return True

def session_set():
    """
    set the session on the browser

    Returns:
        a 200 HTTP response with a set-cookie header if the API key is valid,
        otherwise abort(403)
    """
    api_key_is_valid(app, flask_request)
    res = make_response(
        jsonify(
            structure(
                status="ok",
                msg=messages("browser_session_valid")
            )
        )
    )
    res.set_cookie("key", value=app.config["OWASP_NETTACKER_CONFIG"]["api_access_key"])
    return res

def load_all_method_args(language, API=False):
    """
    load all module args (ARGS method) for each module

    Args:
        language: language
        API: API flag (default False)

    Returns:
        a newline-separated "arg=values" string if API is True, otherwise the
        list of module names (the args are printed via info())
    """
    module_names = []
    modules_args = {}
    # get module names
    for _lib in glob(os.path.dirname(inspect.getfile(lib)) + '/*/*/engine.py'):
        _lib = _lib.replace('/', '.').replace('\\', '.')
        if '.lib.brute.' in _lib or '.lib.scan.' in _lib or '.lib.vuln.' in _lib:
            _lib = 'lib.' + _lib.rsplit('.lib.')[-1].rsplit('.py')[0]
            if _lib not in module_names:
                module_names.append(_lib)
    # get args
    res = ""
    for imodule in module_names:
        try:
            extra_requirements_dict = getattr(
                __import__(imodule, fromlist=['extra_requirements_dict']),
                'extra_requirements_dict')
        except Exception:
            __die_failure(messages(language, 112).format(imodule))
        imodule_args = extra_requirements_dict()
        modules_args[imodule] = []
        for imodule_arg in imodule_args:
            if API:
                res += imodule_arg + "=" + ",".join(
                    map(str, imodule_args[imodule_arg])) + "\n"
            modules_args[imodule].append(imodule_arg)
    if API:
        return res
    for imodule in modules_args:
        info(imodule.rsplit('.')[2] + '_' + imodule.rsplit('.')[1] + ' --> ' +
             ", ".join(modules_args[imodule]))
    return module_names

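# Hedged usage sketch (not part of the function above): with API=True,
# load_all_method_args() returns one "arg=value1,value2,..." line per module
# argument; without it, the args are printed via info() and the module names
# are returned. The "en" language code is an assumed example value.
def example_load_all_method_args():
    api_text = load_all_method_args("en", API=True)   # newline-separated "key=values" lines
    module_list = load_all_method_args("en")          # e.g. ["lib.brute.ftp.engine", ...]
    return api_text, module_list
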
def get_results_csv():
    # todo: need to fix time format
    """
    get a scan result's events through the API in CSV format

    Returns:
        a CSV file of the scan events as an attachment
    """
    api_key_is_valid(app, flask_request)
    session = create_connection()
    result_id = get_value(flask_request, "id")
    if not result_id:
        return jsonify(
            structure(
                status="error",
                msg=messages("invalid_scan_id")
            )
        ), 400
    scan_details = session.query(Report).filter(Report.id == result_id).first()
    data = get_logs_by_scan_unique_id(scan_details.scan_unique_id)
    keys = data[0].keys()
    filename = ".".join(scan_details.report_path_filename.split('.')[:-1])[1:] + '.csv'
    with open(filename, "w") as report_path_filename:
        dict_writer = csv.DictWriter(
            report_path_filename,
            fieldnames=keys,
            quoting=csv.QUOTE_ALL
        )
        dict_writer.writeheader()
        for event in data:
            dict_writer.writerow(
                {
                    key: value for key, value in event.items() if key in keys
                }
            )
    with open(filename, 'r') as report_path_filename:
        reader = report_path_filename.read()
    return Response(
        reader,
        mimetype='text/csv',
        headers={
            'Content-Disposition': 'attachment;filename=' + filename
        }
    )

def build_texttable(events):
    """
    build a text table from the generated scan events
    (each event provides value['date'], value["target"], value['module_name'],
    value['scan_unique_id'], value['options'], value['event'])

    :param events: all events
    :return: the rendered text table plus version footer as utf8 bytes
    """
    _table = texttable.Texttable()
    table_headers = [
        'target',
        'module_name',
        'scan_unique_id',
        'options',
        'event',
        'date'
    ]
    _table.add_rows(
        [
            table_headers
        ]
    )
    for event in events:
        _table.add_rows(
            [
                table_headers,
                [
                    event['target'],
                    event['module_name'],
                    event['scan_unique_id'],
                    event['options'],
                    event['event'],
                    event['date']
                ]
            ]
        )
    return _table.draw().encode('utf8') + b'\n\n' + messages("nettacker_version_details").format(
        version_info()[0],
        version_info()[1],
        now()
    ).encode('utf8') + b"\n"

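# Hedged usage sketch: build_texttable() expects an iterable of event dicts
# carrying the keys listed in its docstring; every sample value below is
# invented for illustration only.
def example_build_texttable():
    sample_events = [
        {
            "target": "192.168.1.1",
            "module_name": "port_scan",
            "scan_unique_id": "2ba24e7b1f7e4b2f8e1d",
            "options": "{}",
            "event": "port 80 is open",
            "date": "2024-01-01 00:00:00",
        }
    ]
    return build_texttable(sample_events)   # utf8 bytes: table + version footer
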
def submit_report_to_db(date, scan_id, report_filename, events_num, verbose,
                        api_flag, report_type, graph_flag, category, profile,
                        scan_method, language, scan_cmd, ports):
    """
    this function was created to submit the generated reports into the db;
    the files are not stored in the db, just the path!

    Args:
        date: date and time
        scan_id: scan hash id
        report_filename: report full path and filename
        events_num: number of events in the report
        verbose: verbose level used to generate the report
        api_flag: 0 (False) if the scan ran from the CLI and 1 (True) if it ran from the API
        report_type: could be TEXT, JSON or HTML
        graph_flag: name of the graph used (if it's HTML type)
        category: category of the modules used in the scan (vuln, scan, brute)
        profile: profiles used in the scan
        scan_method: modules used in the scan
        language: scan report language
        scan_cmd: scan command line if run from the CLI, otherwise messages(language, "through_API")
        ports: selected ports, otherwise None

    Returns:
        True if submitted, otherwise False
    """
    info(messages(language, "inserting_report_db"))
    session = create_connection(language)
    session.add(
        Report(date=date, scan_id=scan_id, report_filename=report_filename,
               events_num=events_num, verbose=verbose, api_flag=api_flag,
               report_type=report_type, graph_flag=graph_flag,
               category=category, profile=profile, scan_method=scan_method,
               language=language, scan_cmd=scan_cmd, ports=ports))
    return send_submit_query(session, language)

def submit_report_to_db(date, scan_id, report_filename, events_num, verbose,
                        api_flag, report_type, graph_flag, category, profile,
                        scan_method, language, scan_cmd, ports):
    info(messages(language, 169))
    send_submit_query(
        """
        INSERT INTO reports (
            date, scan_id, report_filename, events_num, verbose, api_flag,
            report_type, graph_flag, category, profile, scan_method, language,
            scan_cmd, ports
        ) VALUES (
            "{0}", "{1}", "{2}", "{3}", "{4}", "{5}", "{6}", "{7}", "{8}",
            "{9}", "{10}", "{11}", "{12}", "{13}"
        );
        """.format(date, scan_id, report_filename, events_num, verbose,
                   api_flag, report_type, graph_flag, category, profile,
                   scan_method, language, scan_cmd, ports),
        language)
    return True

def create_connection():
    """
    a function to create a connection to the db; it retries 100 times if the
    connection returns an error

    Returns:
        connection if success, otherwise False
    """
    try:
        for _ in range(0, 100):
            try:
                db_engine = create_engine(
                    db_inputs(DB),
                    connect_args={'check_same_thread': False})
                Session = sessionmaker(bind=db_engine)
                session = Session()
                return session
            except Exception:
                time.sleep(0.01)
    except Exception:
        warn(messages("database_connect_fail"))
    return False

def argv_parser():
    """
    parse ARGVs using argparse

    Returns:
        parser, parsed ARGVs
    """
    # create parser
    parser = argparse.ArgumentParser(prog="OWASP Honeypot", add_help=False)
    # create menu
    engineOpt = parser.add_argument_group(messages("en", "engine"),
                                          messages("en", "engine_input"))
    # add select module options + list of available modules
    engineOpt.add_argument("-m", "--select-module", action="store",
                           dest="selected_modules",
                           default=user_configuration()["default_selected_modules"],
                           help=messages("en", "select_module").format(load_all_modules() + ["all"]))
    # by default all modules are selected; users can exclude one or more (separated with commas)
    engineOpt.add_argument("-x", "--exclude-module", action="store",
                           dest="excluded_modules",
                           default=user_configuration()["default_excluded_modules"],
                           help=messages("en", "exclude_module").format(load_all_modules()))
    # limit the virtual machine storage to avoid related abuse
    engineOpt.add_argument("-s", "--vm-storage-limit", action="store",
                           dest="virtual_machine_storage_limit", type=float,
                           default=docker_configuration()["virtual_machine_storage_limit"],
                           help=messages("en", "vm_storage_limit"))
    # reset the containers from time to time to prevent them becoming persistent botnet zombies
    engineOpt.add_argument("-r", "--vm-reset-factory-time", action="store",
                           dest="virtual_machine_container_reset_factory_time_seconds",
                           type=int,
                           default=docker_configuration()["virtual_machine_container_reset_factory_time_seconds"],
                           help=messages("en", "vm_reset_factory_time"))
    # start the API server
    engineOpt.add_argument("--start-api-server", action="store_true",
                           dest="start_api_server", default=False,
                           help="start API server")
    # enable verbose mode (debug mode)
    engineOpt.add_argument("--verbose", action="store_true",
                           dest="verbose_mode", default=False,
                           help="enable verbose mode")
    # disable colored CLI output
    engineOpt.add_argument("--disable-colors", action="store_true",
                           dest="disable_colors", default=False,
                           help="disable colors in CLI")
    # test mode (CI/etc.)
    engineOpt.add_argument("--test", action="store_true", dest="run_as_test",
                           default=False, help="run a test and exit")
    # help menu
    engineOpt.add_argument("-h", "--help", action="store_true", default=False,
                           dest="show_help_menu",
                           help=messages("en", "show_help_menu"))
    return parser, parser.parse_args()

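# Hedged usage sketch: a caller typically unpacks the parser plus the parsed
# arguments and prints the help menu when -h/--help was given; this flow is an
# illustrative assumption, not the project's actual entry point.
def example_argv_parser():
    parser, argv_options = argv_parser()
    if argv_options.show_help_menu:
        parser.print_help()
        return None
    return argv_options
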
def get_result_content():
    """
    get a result HTML/TEXT/JSON content

    Returns:
        content of the scan result
    """
    api_key_is_valid(app, flask_request)
    scan_id = get_value(flask_request, "id")
    if not scan_id:
        return jsonify(
            structure(status="error", msg=messages("invalid_scan_id"))), 400
    filename, file_content = get_scan_result(scan_id)
    return Response(
        file_content,
        mimetype=mime_types().get(os.path.splitext(filename)[1], "text/plain"),
        headers={
            'Content-Disposition': 'attachment;filename=' + filename.split('/')[-1]
        })

def submit_report_to_db(date, scan_id, report_filename, events_num, verbose,
                        api_flag, report_type, graph_flag, category, profile,
                        scan_method, language, scan_cmd, ports):
    """
    this function was created to submit the generated reports into the db;
    the files are not stored in the db, just the path!

    Args:
        date: date and time
        scan_id: scan hash id
        report_filename: report full path and filename
        events_num: number of events in the report
        verbose: verbose level used to generate the report
        api_flag: 0 (False) if the scan ran from the CLI and 1 (True) if it ran from the API
        report_type: could be TEXT, JSON or HTML
        graph_flag: name of the graph used (if it's HTML type)
        category: category of the modules used in the scan (vuln, scan, brute)
        profile: profiles used in the scan
        scan_method: modules used in the scan
        language: scan report language
        scan_cmd: scan command line if run from the CLI, otherwise messages(language, "through_API")
        ports: selected ports, otherwise None

    Returns:
        True if submitted, otherwise False
    """
    info(messages(language, "inserting_report_db"))
    return send_submit_query(
        """
        INSERT INTO reports (
            date, scan_id, report_filename, events_num, verbose, api_flag,
            report_type, graph_flag, category, profile, scan_method, language,
            scan_cmd, ports
        ) VALUES (
            "{0}", "{1}", "{2}", "{3}", "{4}", "{5}", "{6}", "{7}", "{8}",
            "{9}", "{10}", "{11}", "{12}", "{13}"
        );
        """.format(date, scan_id, report_filename, events_num, verbose,
                   api_flag, report_type, graph_flag, category, profile,
                   scan_method, language, scan_cmd, ports), language)

def _start_api(api_host, api_port, api_debug_mode, api_access_key,
               api_client_white_list, api_client_white_list_ips,
               api_access_log, api_access_log_filename, api_cert, language):
    """
    entry point to run the API through flask

    Args:
        api_host: host/IP to bind address
        api_port: bind port
        api_debug_mode: debug mode
        api_access_key: API access key
        api_client_white_list: client white list flag
        api_client_white_list_ips: client white list IPs
        api_access_log: access log flag
        api_access_log_filename: access log filename
        api_cert: API certificate
        language: language
    """
    # Starting the API
    write_to_api_console(messages(language, "API_key").format(api_access_key))
    p = multiprocessing.Process(
        target=__process_it,
        args=(api_host, api_port, api_debug_mode, api_access_key,
              api_client_white_list, api_client_white_list_ips,
              api_access_log, api_access_log_filename, api_cert, language))
    p.start()
    # Sometimes it takes a long time to terminate flask with CTRL+C,
    # so it's better to use KeyboardInterrupt to terminate!
    while True:
        try:
            exitflag = True
            if len(multiprocessing.active_children()) != 0:
                exitflag = False
            time.sleep(0.3)
            if exitflag:
                break
        except KeyboardInterrupt:
            for process in multiprocessing.active_children():
                process.terminate()
            break
    __die_success()

def submit_report_to_db(event):
    """
    this function was created to submit the generated reports into the db;
    the files are not stored in the db, just the path!

    Args:
        event: event log

    Returns:
        True if submitted, otherwise False
    """
    verbose_info(messages("inserting_report_db"))
    session = create_connection()
    session.add(
        Report(
            date=event["date"],
            scan_unique_id=event["scan_unique_id"],
            report_path_filename=json.dumps(
                event["options"]["report_path_filename"]),
            options=json.dumps(event["options"]),
        ))
    return send_submit_query(session)

def __logs_to_report_html(host, language):
    try:
        logs = []
        for log in send_read_query(
                "select host,username,password,port,type,date,description from hosts_log where host=\"{0}\"".format(host),
                language):
            data = {
                "SCAN_ID": host,
                "HOST": log[0],
                "USERNAME": log[1],
                "PASSWORD": log[2],
                "PORT": log[3],
                "TYPE": log[4],
                "TIME": log[5],
                "DESCRIPTION": log[6]
            }
            logs.append(data)
        from core.log import build_graph
        if compatible.version() == 2:
            import sys
            reload(sys)
            sys.setdefaultencoding('utf8')
        _graph = build_graph("d3_tree_v2_graph", "en", logs, 'HOST',
                             'USERNAME', 'PASSWORD', 'PORT', 'TYPE',
                             'DESCRIPTION')
        from lib.html_log import _log_data
        _table = _log_data.table_title.format(_graph, _log_data.css_1, 'HOST',
                                              'USERNAME', 'PASSWORD', 'PORT',
                                              'TYPE', 'DESCRIPTION', 'TIME')
        for value in logs:
            _table += _log_data.table_items.format(
                value['HOST'], value['USERNAME'], value['PASSWORD'],
                value['PORT'], value['TYPE'], value['DESCRIPTION'],
                value['TIME'])
        _table += _log_data.table_end + '<p class="footer">' + messages("en", 93) \
            .format(compatible.__version__, compatible.__code_name__, now()) + '</p>'
        return _table
    except Exception:
        return ""

def create_connection(language):
    """
    a function to create a connection to the db; it retries 100 times if the
    connection returns an error

    Args:
        language: language

    Returns:
        connection if success, otherwise False
    """
    try:
        for i in range(0, 100):
            try:
                db_engine = create_engine(db_inputs(DB))
                Session = sessionmaker(bind=db_engine)
                session = Session()
                return session
            except Exception:
                time.sleep(0.01)
    except Exception:
        warn(messages(language, "database_connect_fail"))
    return False

def send_submit_query(session):
    """
    a function to send submit-based queries (such as insert, update or delete)
    to the db; it retries 100 times if the connection returns an error.

    Args:
        session: session to commit

    Returns:
        True if submitted successfully, otherwise False
    """
    try:
        for _ in range(1, 100):
            try:
                session.commit()
                return True
            except Exception:
                time.sleep(0.01)
    except Exception:
        warn(messages("database_connect_fail"))
        return False
    return False

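# Hedged sketch of how create_connection() and send_submit_query() are meant
# to be used together (as submit_logs_to_db() and submit_report_to_db() do):
# open a session, stage an ORM object, then let send_submit_query() retry the
# commit. The report_row argument is a placeholder for a Report(...) or
# HostsLog(...) instance.
def example_submit(report_row):
    session = create_connection()
    if not session:
        return False
    session.add(report_row)
    return send_submit_query(session)
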
def get_results_json():
    """
    get a scan result's events through the API in JSON format

    Returns:
        a JSON array of events as an attachment
    """
    api_key_is_valid(app, flask_request)
    session = create_connection()
    result_id = get_value(flask_request, "id")
    if not result_id:
        return jsonify(
            structure(status="error", msg=messages("invalid_scan_id"))), 400
    scan_details = session.query(Report).filter(Report.id == result_id).first()
    json_object = json.dumps(
        get_logs_by_scan_unique_id(scan_details.scan_unique_id))
    filename = ".".join(
        scan_details.report_path_filename.split('.')[:-1])[1:] + '.json'
    return Response(
        json_object,
        mimetype='application/json',
        headers={'Content-Disposition': 'attachment;filename=' + filename})

def start_api_server(options):
    """
    entry point to run the API through flask

    Args:
        options: all options
    """
    # Starting the API
    write_to_api_console(
        messages("API_key").format(options.api_port, options.api_access_key))
    p = multiprocessing.Process(target=start_api_subprocess, args=(options, ))
    p.start()
    # Sometimes it takes a long time to terminate flask with CTRL+C,
    # so it's better to use KeyboardInterrupt to terminate!
    while len(multiprocessing.active_children()) != 0:
        try:
            time.sleep(0.3)
        except KeyboardInterrupt:
            for process in multiprocessing.active_children():
                process.terminate()
            break
    die_success()

def submit_logs_to_db(log):
    """
    this function was created to submit new events into the database

    Args:
        log: log event in JSON type

    Returns:
        True if success, otherwise False
    """
    if isinstance(log, dict):
        session = create_connection()
        session.add(
            HostsLog(target=log["target"],
                     date=log["date"],
                     module_name=log["module_name"],
                     scan_unique_id=log["scan_unique_id"],
                     options=json.dumps(log["options"]),
                     event=json.dumps(log["event"])))
        return send_submit_query(session)
    else:
        warn(messages("invalid_json_type_to_db").format(log))
        return False

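# Hedged example of the event dict shape submit_logs_to_db() expects, inferred
# from the keys it reads; every concrete value below is invented for
# illustration.
def example_submit_logs_to_db():
    event = {
        "target": "192.168.1.1",
        "date": "2024-01-01 00:00:00",
        "module_name": "port_scan",
        "scan_unique_id": "2ba24e7b1f7e4b2f8e1d",
        "options": {"timeout": 3.0},
        "event": {"port": 80, "state": "open"},
    }
    return submit_logs_to_db(event)
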
def load_all_modules():
    """
    load all available modules

    Returns:
        an array of all module names
    """
    # Search for modules.
    # Modules live in modules/category_name/module_name
    # (e.g. modules/ftp/weak_password); they are listed based on the folder
    # names and only if a "Dockerfile" exists!
    # structure of a module name:
    #   module_name = modules/(category_name/module_name)/__init__.py
    # example:
    #   module_name = modules/(ftp/weak_password)/__init__.py
    #               = ftp/weak_password
    module_names = []
    module_basepath = os.path.dirname(inspect.getfile(modules))
    path_pattern = module_basepath + '/*/*/__init__.py'
    for module in glob(path_pattern):
        module_dir = os.path.split(module)[0]
        sub_module_name = os.path.split(module_dir)[1]
        category_name = os.path.split(os.path.split(module_dir)[0])[1]
        module_name = category_name + '/' + sub_module_name
        dockerfile_path = os.path.join(module_dir, "Dockerfile")
        if os.path.exists(dockerfile_path):
            if module_name not in module_names:
                module_names.append(module_name)
        else:
            warn(messages("en", "module_not_available").format(module_name))
    return module_names

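# Hedged usage sketch: load_all_modules() returns "category/module_name"
# strings for every module folder that ships a Dockerfile, so a requested
# module can be validated against it. The default value is an assumed example
# taken from the folder-structure comment above.
def example_validate_module(requested="ftp/weak_password"):
    available = load_all_modules()
    return requested if requested in available else None
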
def logs_to_report_html(target):
    """
    generate an HTML report with a d3_tree_v2_graph for a host

    Args:
        target: the target

    Returns:
        HTML report
    """
    from core.graph import build_graph
    from lib.html_log import log_data
    session = create_connection()
    logs = [
        {
            "date": log.date,
            "target": log.target,
            "module_name": log.module_name,
            "scan_unique_id": log.scan_unique_id,
            "port": log.port,
            "event": log.event,
            "json_event": log.json_event
        }
        for log in session.query(HostsLog).filter(HostsLog.target == target).all()
    ]
    html_graph = build_graph("d3_tree_v2_graph", logs)
    html_content = log_data.table_title.format(
        html_graph, log_data.css_1, 'date', 'target', 'module_name',
        'scan_unique_id', 'port', 'event', 'json_event')
    for event in logs:
        html_content += log_data.table_items.format(
            event['date'], event["target"], event['module_name'],
            event['scan_unique_id'], event['port'], event['event'],
            event['json_event'])
    html_content += log_data.table_end + '<p class="footer">' + messages(
        "nettacker_report") + '</p>'
    return html_content

def _start_api(api_host, api_port, api_debug_mode, api_access_key,
               api_client_white_list, api_client_white_list_ips,
               api_access_log, api_access_log_filename, language):
    # Starting the API
    write_to_api_console(messages(language, 156).format(api_access_key))
    p = multiprocessing.Process(
        target=__process_it,
        args=(api_host, api_port, api_debug_mode, api_access_key,
              api_client_white_list, api_client_white_list_ips,
              api_access_log, api_access_log_filename, language))
    p.start()
    # Sometimes it takes a long time to terminate flask with CTRL+C,
    # so it's better to use KeyboardInterrupt to terminate!
    while True:
        try:
            exitflag = True
            if len(multiprocessing.active_children()) != 0:
                exitflag = False
            time.sleep(0.3)
            if exitflag:
                break
        except KeyboardInterrupt:
            for process in multiprocessing.active_children():
                process.terminate()
            break
    __die_success()

def __build_texttable(JSON_FROM_DB, _HOST, _USERNAME, _PASSWORD, _PORT, _TYPE,
                      _DESCRIPTION, _TIME, language):
    """
    build a text table with the generated events related to the scan

    :param JSON_FROM_DB: JSON events from database
    :param _HOST: host string
    :param _USERNAME: username string
    :param _PASSWORD: password string
    :param _PORT: port string
    :param _TYPE: type string
    :param _DESCRIPTION: description string
    :param _TIME: time string
    :param language: language
    :return: array [text table, event_number]
    """
    _table = texttable.Texttable()
    _table.add_rows(
        [[_HOST, _USERNAME, _PASSWORD, _PORT, _TYPE, _DESCRIPTION, _TIME]])
    events_num = 0
    for value in JSON_FROM_DB:
        _table.add_rows(
            [[_HOST, _USERNAME, _PASSWORD, _PORT, _TYPE, _DESCRIPTION, _TIME],
             [
                 value['HOST'], value['USERNAME'], value['PASSWORD'],
                 value['PORT'], value['TYPE'], value['DESCRIPTION'],
                 value['TIME']
             ]])
        events_num += 1
    return [
        _table.draw().encode('utf8') + b'\n\n' +
        messages(language, "nettacker_version_details").format(
            compatible.__version__, compatible.__code_name__,
            now()).encode('utf8') + b"\n",
        events_num
    ]

def __languages():
    """
    define the list of languages with a country flag for the API

    Returns:
        HTML code for each language with its country flag
    """
    languages = list(messages(-1, 0))
    res = ""
    flags = {
        "el": "gr",
        "fr": "fr",
        "en": "us",
        "nl": "nl",
        "ps": "ps",
        "tr": "tr",
        "de": "de",
        "ko": "kr",
        "it": "it",
        "ja": "jp",
        "fa": "ir",
        "hy": "am",
        "ar": "sa",
        "zh-cn": "cn",
        "vi": "vi",
        "ru": "ru",
        "hi": "in",
        "ur": "pk",
        "id": "id",
        "es": "es",
        "iw": "il"
    }
    for lang in languages:
        res += """<option {2} id="{0}" data-content='<span class="flag-icon flag-icon-{1}" value="{0}"></span> {0}'></option>""" \
            .format(lang, flags[lang], "selected" if lang == "en" else "")
    return res

def test(target, retries, timeout_sec, user_agent, http_method, socks_proxy,
         verbose_level, trying, total_req, total, num, language):
    if verbose_level > 3:
        info(
            messages(language, "trying_message").format(
                trying, total_req, num, total, target_to_host(target),
                "default_port", 'dir_scan'))
    if socks_proxy is not None:
        socks_version = socks.SOCKS5 if socks_proxy.startswith(
            'socks5://') else socks.SOCKS4
        socks_proxy = socks_proxy.rsplit('://')[1]
        if '@' in socks_proxy:
            socks_username = socks_proxy.rsplit(':')[0]
            socks_password = socks_proxy.rsplit(':')[1].rsplit('@')[0]
            socks.set_default_proxy(
                socks_version,
                str(socks_proxy.rsplit('@')[1].rsplit(':')[0]),
                int(socks_proxy.rsplit(':')[-1]),
                username=socks_username,
                password=socks_password)
            socket.socket = socks.socksocket
            socket.getaddrinfo = getaddrinfo
        else:
            socks.set_default_proxy(socks_version,
                                    str(socks_proxy.rsplit(':')[0]),
                                    int(socks_proxy.rsplit(':')[1]))
            socket.socket = socks.socksocket
            socket.getaddrinfo = getaddrinfo
    n = 0
    while True:
        try:
            if http_method == "GET":
                SESSION.get(target, verify=False, timeout=timeout_sec,
                            headers=user_agent)
            elif http_method == "HEAD":
                SESSION.head(target, verify=False, timeout=timeout_sec,
                             headers=user_agent)
            return 0
        except Exception:
            n += 1
            if n == retries:
                return 1

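# Hedged, standalone restatement of the socks_proxy parsing performed in
# test() above for a credentialed URL; the sample URL is an assumption used
# only to show what each rsplit() extracts.
def example_parse_socks_proxy(proxy="socks5://user:pass@127.0.0.1:9050"):
    body = proxy.rsplit('://')[1]                  # "user:pass@127.0.0.1:9050"
    username = body.rsplit(':')[0]                 # "user"
    password = body.rsplit(':')[1].rsplit('@')[0]  # "pass"
    host = body.rsplit('@')[1].rsplit(':')[0]      # "127.0.0.1"
    port = int(body.rsplit(':')[-1])               # 9050
    return username, password, host, port
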
def create_connection(language):
    """
    a function to create sqlite3 connections to the db; it retries 100 times
    if the connection returns an error

    Args:
        language: language

    Returns:
        sqlite3 connection if success, otherwise False
    """
    try:
        # retries
        for i in range(0, 100):
            try:
                return sqlite3.connect(
                    os.path.join(
                        os.path.dirname(os.path.dirname(__file__)),
                        _builder(_core_config(),
                                 _core_default_config())["api_db_name"]))
            except Exception:
                time.sleep(0.01)
    except Exception:
        warn(messages(language, "database_connect_fail"))
    return False

def create_report(options, scan_unique_id):
    """
    sort all events, create the log file in HTML/TEXT/JSON/CSV and remove old logs

    Args:
        options: parsing options
        scan_unique_id: scan unique id

    Returns:
        True if success, otherwise None
    """
    all_scan_logs = get_logs_by_scan_unique_id(scan_unique_id)
    if not all_scan_logs:
        info(messages("no_events_for_report"))
        return True
    report_path_filename = options.report_path_filename
    if (len(report_path_filename) >= 5 and report_path_filename[-5:] == '.html') or \
            (len(report_path_filename) >= 4 and report_path_filename[-4:] == '.htm'):
        if options.graph_name:
            html_graph = build_graph(options.graph_name, all_scan_logs)
        else:
            html_graph = ''
        from lib.html_log import log_data
        html_table_content = log_data.table_title.format(
            html_graph, log_data.css_1, 'date', 'target', 'module_name',
            'scan_unique_id', 'port', 'event', 'json_event')
        for event in all_scan_logs:
            html_table_content += log_data.table_items.format(
                event["date"], event["target"], event["module_name"],
                event["scan_unique_id"], event["port"], event["event"],
                event["json_event"])
        html_table_content += log_data.table_end + '<p class="footer">' + messages(
            "nettacker_version_details").format(version_info()[0],
                                                version_info()[1],
                                                now()) + '</p>'
        with open(report_path_filename, 'w', encoding='utf-8') as save:
            save.write(html_table_content + '\n')
    elif len(report_path_filename) >= 5 and report_path_filename[-5:] == '.json':
        with open(report_path_filename, 'w', encoding='utf-8') as save:
            save.write(str(json.dumps(all_scan_logs)) + '\n')
    elif len(report_path_filename) >= 4 and report_path_filename[-4:] == '.csv':
        keys = all_scan_logs[0].keys()
        with open(report_path_filename, 'a') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=keys)
            writer.writeheader()
            for log in all_scan_logs:
                dict_data = {
                    key: value for key, value in log.items() if key in keys
                }
                writer.writerow(dict_data)
    else:
        with open(report_path_filename, 'wb') as save:
            save.write(build_texttable(all_scan_logs))
    submit_report_to_db({
        "date": now(model=None),
        "scan_unique_id": scan_unique_id,
        "options": vars(options),
    })
    info(messages("file_saved").format(report_path_filename))
    return True

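# Hedged usage sketch: create_report() picks the output format from the
# report_path_filename extension (.html/.htm, .json, .csv, otherwise a text
# table) and then records the report in the db. The Namespace fields and
# values are assumptions for illustration; the real options object carries
# many more attributes.
def example_create_report(scan_unique_id):
    from argparse import Namespace
    options = Namespace(
        report_path_filename="results.html",
        graph_name="d3_tree_v2_graph",
    )
    return create_report(options, scan_unique_id)
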
def sort_logs(log_in_file, language, graph_flag, scan_id, scan_cmd,
              verbose_level, api_flag, profile, scan_method, ports):
    """
    sort all events, create the log file in HTML/TEXT/JSON and remove old logs

    Args:
        log_in_file: output filename
        language: language
        graph_flag: graph name
        scan_id: scan hash id
        scan_cmd: scan cmd
        verbose_level: verbose level number
        api_flag: API flag
        profile: profiles
        scan_method: module names
        ports: ports

    Returns:
        True if success, otherwise None
    """
    _HOST = messages(language, 53)
    _USERNAME = messages(language, 54)
    _PASSWORD = messages(language, 55)
    _PORT = messages(language, 56)
    _TYPE = messages(language, 57)
    _DESCRIPTION = messages(language, 58)
    _TIME = messages(language, 115)
    events_num = 0
    report_type = ""
    JSON_FROM_DB = __logs_by_scan_id(scan_id, language)
    JSON_Data = sorted(JSON_FROM_DB, key=sorted)
    if compatible.version() == 2:
        import sys
        reload(sys)
        sys.setdefaultencoding('utf8')
    if (len(log_in_file) >= 5 and log_in_file[-5:] == '.html') or (
            len(log_in_file) >= 4 and log_in_file[-4:] == '.htm'):
        report_type = "HTML"
        data = sorted(JSON_FROM_DB, key=lambda x: sorted(x.keys()))
        # if the user wants a graph
        _graph = ''
        if graph_flag is not None:
            _graph = build_graph(graph_flag, language, data, 'HOST',
                                 'USERNAME', 'PASSWORD', 'PORT', 'TYPE',
                                 'DESCRIPTION')
        from lib.html_log import _log_data
        _css = _log_data.css_1
        _table = _log_data.table_title.format(_graph, _css, _HOST, _USERNAME,
                                              _PASSWORD, _PORT, _TYPE,
                                              _DESCRIPTION, _TIME)
        for value in data:
            _table += _log_data.table_items.format(
                value['HOST'], value['USERNAME'], value['PASSWORD'],
                value['PORT'], value['TYPE'], value['DESCRIPTION'],
                value['TIME'])
            events_num += 1
        _table += _log_data.table_end + '<p class="footer">' + messages(language, 93) \
            .format(compatible.__version__, compatible.__code_name__, now()) + '</p>'
        __log_into_file(log_in_file, 'w' if type(_table) == str else 'wb',
                        _table, language, final=True)
    elif len(log_in_file) >= 5 and log_in_file[-5:] == '.json':
        graph_flag = ""
        report_type = "JSON"
        data = json.dumps(JSON_Data)
        events_num = len(JSON_Data)
        __log_into_file(log_in_file, 'w', data, language, final=True)
    else:
        graph_flag = ""
        report_type = "TEXT"
        data = sorted(JSON_FROM_DB)
        _table = texttable.Texttable()
        _table.add_rows(
            [[_HOST, _USERNAME, _PASSWORD, _PORT, _TYPE, _DESCRIPTION, _TIME]])
        for value in data:
            _table.add_rows([[
                _HOST, _USERNAME, _PASSWORD, _PORT, _TYPE, _DESCRIPTION, _TIME
            ], [
                value['HOST'], value['USERNAME'], value['PASSWORD'],
                value['PORT'], value['TYPE'], value['DESCRIPTION'],
                value['TIME']
            ]])
            events_num += 1
        data = _table.draw().encode('utf8') + b'\n\n' + messages(
            language, 93).format(compatible.__version__,
                                 compatible.__code_name__,
                                 now()).encode('utf8')
        __log_into_file(log_in_file, 'wb', data, language, final=True)
    info(messages(language, 167))
    category = []
    for sm in scan_method:
        if sm.rsplit("_")[-1] not in category:
            category.append(sm.rsplit("_")[-1])
    category = ",".join(list(set(category)))
    scan_method = ",".join(scan_method)
    if ports is None:
        ports = "default"
    submit_report_to_db(now(), scan_id, log_in_file, events_num,
                        0 if verbose_level == 0 else 1, api_flag, report_type,
                        graph_flag, category, profile, scan_method, language,
                        scan_cmd, ports)
    info(messages(language, 171))
    hosts = []
    for log in JSON_Data:
        if log["HOST"] not in hosts:
            hosts.append(log["HOST"])
    for host in hosts:
        for sm in scan_method.rsplit(','):
            remove_old_logs(host, sm, scan_id, language)
    # info(messages(language, 170))
    # for log in JSON_Data:
    #     submit_logs_to_db(language, log)
    return True
