def restat(self, *, force: bool = False) -> None:
    """ Re-stat the underlying file when it has gone stale (or when forced) """
    stale = time.now() - self._timestamp > _RESTAT_AFTER
    if not (force or stale):
        return
    # Preserve the key across the re-stat: the File constructor sets it
    # to None with no way to override it, so save and restore it here
    saved_key = self._file.key
    self._file = models.File.FromFS(self.path, idm)
    self._file.key = saved_key
    self._timestamp = time.now()
def get_logs_csv():
    """
    get target's logs through the API in CSV format

    Returns:
        a CSV file attachment response with the target's events
    """
    api_key_is_valid(app, flask_request)
    target = get_value(flask_request, "target")
    data = logs_to_report_json(target)
    # Guard against a target with no logged events: the original
    # unconditionally read data[0] and raised IndexError (HTTP 500)
    keys = data[0].keys() if data else []
    filename = "report-" + now(model="%Y_%m_%d_%H_%M_%S") + "".join(
        random.choice(string.ascii_lowercase) for _ in range(10))
    # Write the CSV to disk first, then read it back for the response
    with open(filename, "w") as report_path_filename:
        dict_writer = csv.DictWriter(report_path_filename,
                                     fieldnames=keys,
                                     quoting=csv.QUOTE_ALL)
        dict_writer.writeheader()
        for event in data:
            dict_writer.writerow(
                {key: value for key, value in event.items() if key in keys})
    with open(filename, 'r') as report_path_filename:
        reader = report_path_filename.read()
    return Response(reader,
                    mimetype='text/csv',
                    headers={
                        'Content-Disposition':
                        'attachment;filename=' + filename + '.csv'
                    })
def build_texttable(events):
    """
    build a text table with generated events related to the scan

    :param events: all events; each is a dict carrying the keys
                   date, target, module_name, scan_unique_id, port,
                   event and json_event
    :return: the drawn table as UTF-8 bytes, followed by the
             nettacker version footer
    """
    _table = texttable.Texttable()
    table_headers = [
        'date', 'target', 'module_name', 'scan_unique_id', 'port', 'event',
        'json_event'
    ]
    # Set the header row exactly once; the original re-added it inside
    # the loop for every single event (redundant: texttable keeps one
    # header), so use add_row for the data rows
    _table.add_rows([table_headers])
    for event in events:
        _table.add_row([
            event['date'], event['target'], event['module_name'],
            event['scan_unique_id'], event['port'], event['event'],
            event['json_event']
        ])
    return _table.draw().encode(
        'utf8') + b'\n\n' + messages("nettacker_version_details").format(
            version_info()[0], version_info()[1], now()).encode('utf8') + b"\n"
def __init__(self, mpistat: T.Path, *bases: T.Path) -> None:
    """ Constructor: Set the base paths from which to start the walk.
    Note that any paths that are not directories, don't exist, or are
    children of other base paths will be skipped

    @param  mpistat  mpistat output
    @param  bases    Base paths
    @raises FileNotFoundError  if mpistat is missing or not a regular file
    """
    if not mpistat.is_file():
        raise FileNotFoundError(
            f"{mpistat} does not exist or is not a file")

    # mpistat file and its modification timestamp
    self._mpistat = mpistat
    self._timestamp = time.epoch(mpistat.stat().st_mtime)

    # Log a warning if forcible restat'ing is going to happen
    # (fixed: the message had a pointless f-string prefix, lint F541)
    if time.now() - self._timestamp > _RESTAT_AFTER:
        self.log.warning(
            "mpistat file is out of date; files will be forcibly restat'ed")

    # Map each vault's base64-encoded root prefix to its vault object
    self._vaults = {
        mpistatWalker._base64_prefix(vault.root): vault
        for vault in self._fetch_vaults(*bases)
    }
def access_log(response):
    """
    if access log enabled, write one request line to the access log

    Args:
        response: the flask response

    Returns:
        the flask response, unchanged
    """
    if app.config["OWASP_NETTACKER_CONFIG"]["api_access_log"]:
        # Use a context manager so the log file is always closed, even
        # if the write raises (the original leaked the handle on error)
        with open(
            app.config["OWASP_NETTACKER_CONFIG"]["api_access_log"], "ab"
        ) as log_request:
            log_request.write(
                "{0} [{1}] {2} \"{3} {4}\" {5} {6} {7}\r\n".format(
                    flask_request.remote_addr,
                    now(),
                    flask_request.host,
                    flask_request.method,
                    flask_request.full_path,
                    flask_request.user_agent,
                    response.status_code,
                    json.dumps(flask_request.form)
                ).encode()
            )
    return response
def test_limbo_deletion_threshold_not_passed(self, vault_mock):
    """A Limbo file younger than the deletion threshold must be kept."""
    limbo_file = self.vault.add(Branch.Limbo, self.file_one)
    # One second short of the limbo deletion threshold
    stat_time = time.now() - config.deletion.limbo + time.delta(seconds=1)
    self.file_one.unlink()
    dummy_file = _DummyFile.FromFS(limbo_file.path, idm,
                                   ctime=stat_time,
                                   mtime=stat_time,
                                   atime=stat_time)
    walk_entries = [(self.vault, dummy_file,
                     VaultExc.PhysicalVaultFile("File is in Limbo"))]
    Sweeper(_DummyWalker(walk_entries), MagicMock(), True)
    # Source file gone, limbo copy retained
    self.assertFalse(os.path.isfile(self.file_one))
    self.assertTrue(os.path.isfile(limbo_file.path))
def error(content):
    """
    build the error message and print it to stdout

    Args:
        content: content of the message

    Returns:
        the message in error structure - None
    """
    parts = (
        color.color("red"),
        "[{0}][X] ".format(now()),
        color.color("yellow"),
        content,
        color.color("reset"),
        "\n",
    )
    sys.stdout.buffer.write("".join(parts).encode("utf8"))
    sys.stdout.flush()
def warn(content):
    """
    build the warn message and print it to stdout (skipped under the API)

    Args:
        content: content of the message

    Returns:
        the message in warn structure - None
    """
    if run_from_api():
        return
    text = (color.color("blue") + "[{0}][!] ".format(now())
            + color.color("yellow") + content
            + color.color("reset") + "\n")
    sys.stdout.buffer.write(text.encode("utf8"))
    sys.stdout.flush()
def verbose_info(content):
    """
    print the info message to stdout when verbose mode is enabled

    Args:
        content: content of the message

    Returns:
        None
    """
    if not verbose_mode_is_enabled():
        return
    text = (color.color("yellow") + "[{0}][+] ".format(now())
            + color.color("purple") + content
            + color.color("reset") + "\n")
    sys.stdout.buffer.write(text.encode("utf8"))
    sys.stdout.flush()
def success_event_info(content):
    """
    print the success-event message to stdout (skipped under the API)

    Args:
        content: content of the message

    Returns:
        None
    """
    if run_from_api():
        return
    text = (color.color("red") + "[{0}][+++] ".format(now())
            + color.color("cyan") + content
            + color.color("reset") + "\n")
    sys.stdout.buffer.write(text.encode("utf8"))
    sys.stdout.flush()
def get_logs():
    """
    get target's logs through the API in JSON type

    Returns:
        an array with JSON events
    """
    api_key_is_valid(app, flask_request)
    target = get_value(flask_request, "target")
    events = logs_to_report_json(target)
    # Random suffix avoids filename collisions between downloads
    random_suffix = "".join(
        random.choice(string.ascii_lowercase) for _ in range(10))
    filename = "report-" + now(model="%Y_%m_%d_%H_%M_%S") + random_suffix
    attachment_headers = {
        'Content-Disposition': 'attachment;filename=' + filename + '.json'
    }
    return Response(json.dumps(events),
                    mimetype='application/json',
                    headers=attachment_headers)
def verbose_event_info(content):
    """
    print the verbose event message to stdout when verbose (or event
    verbose) mode is enabled

    Args:
        content: content of the message

    Returns:
        None
    """
    # prevent to stdout if run from API
    if run_from_api():
        return
    if not (verbose_mode_is_enabled() or event_verbose_mode_is_enabled()):
        return
    text = (color.color("yellow") + "[{0}][+] ".format(now())
            + color.color("green") + content
            + color.color("reset") + "\n")
    sys.stdout.buffer.write(text.encode("utf8"))
    sys.stdout.flush()
def view(branch: Branch, view_mode: ViewContext, absolute: bool,
         idm: IDMBase.IdentityManager = idm) -> None:
    """ List the contents of the given branch

    :param branch:    Which Vault branch we're going to look at
    :param view_mode: ViewContext.All: list all files,
                      ViewContext.Here: list files in current directory,
                      ViewContext.Mine: files owned by current user
    :param absolute:  Whether to view absolute paths or not
    """
    cwd = file.cwd()
    vault = _create_vault(cwd, idm)
    count = 0
    for path, _limbo_file in vault.list(branch):
        relative_path = relativise(path, cwd)
        # "Here" keeps only files directly in cwd (no path separator left)
        if view_mode == ViewContext.Here and "/" in str(relative_path):
            continue
        elif view_mode == ViewContext.Mine and path.stat().st_uid != os.getuid(
        ):
            continue
        if branch == Branch.Limbo:
            time_to_live = config.deletion.limbo - \
                (time.now() - time.epoch(_limbo_file.stat().st_mtime))
            # BUG FIX: honour the absolute flag; the original printed
            # relative_path in both branches of the conditional, so
            # --absolute had no effect on Limbo listings
            print(path if absolute else relative_path,
                  f"{round(time_to_live/time.delta(hours=1), 1)} hours",
                  sep="\t")
        else:
            print(path if absolute else relative_path)
        count += 1
    log.info(
        f"""{branch} branch of the vault in {vault.root} contains {count} files {'in the current directory' if view_mode == ViewContext.Here else 'owned by the current user' if view_mode == ViewContext.Mine else ''}""")
def test_emails_stakeholders(self):
    """We're going to get a file close to the threshold, and then check
    if the email that is generated mentions the right information
    """
    # Just inside the largest warning window before the deletion threshold
    new_time: T.DateTime = time.now() - config.deletion.threshold + \
        max(config.deletion.warnings) - time.delta(seconds=1)
    dummy_walker = _DummyWalker([(self.vault, _DummyFile.FromFS(
        self.file_one, idm, ctime=new_time, mtime=new_time,
        atime=new_time), None)])
    dummy_persistence = _DummyPersistence(config.persistence, idm)
    MockMailer.file_path = T.Path(self._tmp.name).resolve() / "mail"
    Sweeper(dummy_walker, dummy_persistence, True,
            MockMailer)  # this will make the email
    # Now we'll see what it says
    # Nothing in all the thresholds except the largest
    # Nothing staged for archival

    def _search_file(file: T.Path, phrase: str) -> T.List[int]:
        """returns every line number at which the phrase occurs in the file"""
        locations: T.List[int] = []
        with open(file) as f:
            for line_num, line in enumerate(f):
                if phrase in line:
                    locations.append(line_num)
        return locations

    # The filepath should only be listed once in the whole email.
    # (fixed: assertEquals is a deprecated unittest alias of assertEqual)
    filepath_line_nums = _search_file(
        MockMailer.file_path, str(self.file_one))
    self.assertEqual(len(filepath_line_nums), 1)
    # That should be at the bottom of all the warnings
    for _line_num in _search_file(
            MockMailer.file_path, "Your files will be DELETED"):
        self.assertLess(_line_num, filepath_line_nums[0])
def create_report(options, scan_unique_id):
    """
    sort all events, create log file in HTML/TEXT/JSON/CSV and submit the
    report record to the database

    Args:
        options: parsing options
        scan_unique_id: scan unique id

    Returns:
        True if success otherwise None
    """
    all_scan_logs = get_logs_by_scan_unique_id(scan_unique_id)
    if not all_scan_logs:
        info(messages("no_events_for_report"))
        return True
    report_path_filename = options.report_path_filename
    # Dispatch on the report extension. str.endswith replaces the
    # original manual length+slice checks, which were inconsistent
    # (the CSV branch tested len >= 5 against a 4-character suffix).
    if report_path_filename.endswith(('.html', '.htm')):
        if options.graph_name:
            html_graph = build_graph(options.graph_name, all_scan_logs)
        else:
            html_graph = ''
        from lib.html_log import log_data
        html_table_content = log_data.table_title.format(
            html_graph, log_data.css_1, 'date', 'target', 'module_name',
            'scan_unique_id', 'port', 'event', 'json_event')
        for event in all_scan_logs:
            html_table_content += log_data.table_items.format(
                event["date"], event["target"], event["module_name"],
                event["scan_unique_id"], event["port"], event["event"],
                event["json_event"])
        html_table_content += log_data.table_end + '<p class="footer">' + messages(
            "nettacker_version_details").format(version_info()[0],
                                                version_info()[1],
                                                now()) + '</p>'
        # with-blocks close the file; the original's explicit close()
        # calls inside them were redundant
        with open(report_path_filename, 'w', encoding='utf-8') as save:
            save.write(html_table_content + '\n')
    elif report_path_filename.endswith('.json'):
        with open(report_path_filename, 'w', encoding='utf-8') as save:
            save.write(str(json.dumps(all_scan_logs)) + '\n')
    elif report_path_filename.endswith('.csv'):
        keys = all_scan_logs[0].keys()
        # NOTE kept in append mode as before; an existing report file
        # will receive a second header row
        with open(report_path_filename, 'a') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=keys)
            writer.writeheader()
            for log in all_scan_logs:
                dict_data = {
                    key: value
                    for key, value in log.items() if key in keys
                }
                writer.writerow(dict_data)
    else:
        # Default: plain-text table report
        with open(report_path_filename, 'wb') as save:
            save.write(build_texttable(all_scan_logs))
    submit_report_to_db({
        "date": now(model=None),
        "scan_unique_id": scan_unique_id,
        "options": vars(options),
    })
    info(messages("file_saved").format(report_path_filename))
    return True
def __init__(self, file: models.File,
             timestamp: T.Optional[T.DateTime] = None) -> None:
    """ Construct from filesystem

    @param file       File model to wrap
    @param timestamp  Stat timestamp; falls back to the current time
    """
    # A None (or otherwise falsy) timestamp falls back to "now"
    self._timestamp = timestamp or time.now()
    self._file = file
def make_file_seem_modified_long_ago(path: T.Path) -> File:
    """Dummy stat: mtime is past the deletion threshold; ctime/atime fresh."""
    stale_mtime = after_deletion_threshold()
    return _DummyFile.FromFS(path, idm,
                             ctime=time.now(),
                             mtime=stale_mtime,
                             atime=time.now())
def make_file_seem_old_but_read_recently(path: T.Path) -> File:
    """Dummy stat: ctime/mtime past the deletion threshold; atime fresh."""
    stale_time = after_deletion_threshold()
    return _DummyFile.FromFS(path, idm,
                             ctime=stale_time,
                             mtime=stale_time,
                             atime=time.now())
def after_deletion_threshold() -> datetime:
    """A moment one second beyond the configured deletion threshold."""
    one_second = time.delta(seconds=1)
    return time.now() - config.deletion.threshold - one_second
def age(self) -> T.TimeDelta:
    """ Time elapsed since the file was last created, modified or accessed

    Restats first so the answer reflects the current filesystem state.
    """
    self.restat()
    # idiom: max takes the values directly; no need for a throwaway list
    latest = max(self._file.mtime, self._file.atime, self._file.ctime)
    return time.now() - latest
def nettacker_user_application_config():
    """
    core framework default config (could be modify by user)

    Returns:
        a JSON with all user default configurations
    """
    from core.compatible import version_info
    # Pre-compute the derived defaults so the returned dict stays flat
    default_report_path = \
        "{results_path}/results_{date_time}_{random_chars}.html".format(
            results_path=nettacker_paths()["results_path"],
            date_time=now(model="%Y_%m_%d_%H_%M_%S"),
            random_chars=generate_random_token(10))
    default_user_agent = "Nettacker {version_number} {version_code}".format(
        version_number=version_info()[0],
        version_code=version_info()[1])
    return {
        # OWASP Nettacker Default Configuration
        "language": "en",
        "verbose_mode": False,
        "verbose_event": False,
        "show_version": False,
        "report_path_filename": default_report_path,
        "graph_name": "d3_tree_v2_graph",
        "show_help_menu": False,
        "targets": None,
        "targets_list": None,
        "selected_modules": None,
        "excluded_modules": None,
        "usernames": None,
        "usernames_list": None,
        "passwords": None,
        "passwords_list": None,
        "ports": None,
        "timeout": 3.0,
        "time_sleep_between_requests": 0.0,
        "scan_ip_range": False,
        "scan_subdomains": False,
        "thread_per_host": 100,
        "parallel_module_scan": 1,
        "socks_proxy": None,
        "retries": 1,
        "ping_before_scan": False,
        "profiles": None,
        "set_hardware_usage": "maximum",  # low, normal, high, maximum
        "user_agent": default_user_agent,
        "show_all_modules": False,
        "show_all_profiles": False,
        "modules_extra_args": None
    }
def process_conditions(
        event,
        module_name,
        target,
        scan_unique_id,
        options,
        response,
        process_number,
        module_thread_number,
        total_module_thread_number,
        request_number_counter,
        total_number_of_requests
):
    """Decide what to do with a module's event: stash it in the temporary
    table, submit it as a real finding, or just log it verbosely.

    Returns True when a successful event was submitted to the db;
    otherwise returns whether the event was temp-only (see final return).

    NOTE(review): this function mutates `event` in place (it deletes
    event['response']['conditions'] on both paths) and rebinds `options`
    to a sanitised deep copy — callers should not rely on either
    afterwards.
    """
    from core.alert import (success_event_info, verbose_info, messages)
    # Temp-only events are stashed in the temporary log table.
    # event.get('response', '') may be a dict or '' — the `in` test
    # works for both (substring vs key membership).
    if 'save_to_temp_events_only' in event.get('response', ''):
        from database.db import submit_temp_logs_to_db
        submit_temp_logs_to_db(
            {
                "date": now(model=None),
                "target": target,
                "module_name": module_name,
                "scan_unique_id": scan_unique_id,
                "event_name": event['response']['save_to_temp_events_only'],
                # "options": options,
                "options": {},
                "event": event,
                "data": response
            }
        )
    # A real (non-temp) event with satisfied conditions becomes a finding
    if event['response']['conditions_results'] and 'save_to_temp_events_only' not in event.get('response', ''):
        from database.db import submit_logs_to_db
        # remove sensitive information before submitting to db
        from config import nettacker_api_config
        options = copy.deepcopy(options)
        for key in nettacker_api_config():
            try:
                del options[key]
            except Exception:
                # key not present in options — nothing to scrub
                continue
        # drop the raw conditions before persisting / echoing the event
        del event['response']['conditions']
        # keep a copy of the request-side keys only (no response payload)
        event_request_keys = copy.deepcopy(event)
        del event_request_keys['response']
        submit_logs_to_db(
            {
                "date": now(model=None),
                "target": target,
                "module_name": module_name,
                "scan_unique_id": scan_unique_id,
                # "options": options,
                "options": {},
                "event": event
            }
        )
        success_event_info(
            messages("send_success_event_from_module").format(
                process_number,
                module_name,
                target,
                module_thread_number,
                total_module_thread_number,
                request_number_counter,
                total_number_of_requests,
                ", ".join(
                    [
                        "{}: {}".format(
                            key,
                            event_request_keys[key]
                        ) for key in event_request_keys
                    ]
                ),
                ", ".join(event['response']['conditions_results'].keys())
            )
        )
        verbose_info(
            json.dumps(event)
        )
        return True
    else:
        # unsuccessful (or temp-only) event: log verbosely only
        del event['response']['conditions']
        verbose_info(
            messages("send_unsuccess_event_from_module").format(
                process_number,
                module_name,
                target,
                module_thread_number,
                total_module_thread_number,
                request_number_counter,
                total_number_of_requests
            )
        )
        verbose_info(
            json.dumps(event)
        )
    return 'save_to_temp_events_only' in event['response']