def save_output(spinner=None):
    """Serialize the collected scan data to a BZIP2-compressed zip archive.

    Writes ``<_output_file>.zip`` containing a single JSON document built
    from the module-level ``_info``, ``_data``, ``_issues`` and ``_evidence``
    stores plus the ``Vulnerabilities`` catalog. If ``spinner`` is provided,
    it is paused around console output so messages print cleanly.

    :param spinner: optional spinner/progress indicator with ``start()`` and
        ``stop()`` methods; may be None.
    """
    # add some extra debug data
    register_info("memsize_issues", total_size(_issues))
    register_info("memsize_info", total_size(_info))
    register_info("memsize_data", total_size(_data))
    register_info("memsize_evidence", total_size(_evidence))
    register_info("gc_stats", gc.get_stats())
    register_info("gc_objects", len(gc.get_objects()))

    if spinner:
        spinner.stop()
    print("Saving...")
    if spinner:
        spinner.start()

    vulns = {
        vuln.name: {
            "severity": vuln.severity,
            "description": vuln.description,
            "id": vuln.id,
        }
        for vuln in Vulnerabilities
    }

    data = {
        "_info": _convert_keys(_info),
        "data": _convert_keys(_data),
        "issues": _convert_keys(_issues),
        "evidence": _convert_keys(_evidence),
        "vulnerabilities": vulns,
    }
    json_data = json.dumps(data, indent=4)

    try:
        # "x" mode fails if the archive already exists, so we never clobber
        # a previous report. The context manager guarantees the zip handle
        # is closed even if writestr() raises (the original leaked it).
        with ExecutionTimer() as tm:
            with zipfile.ZipFile(f"{_output_file}.zip", "x", zipfile.ZIP_BZIP2) as zf:
                zf.writestr(
                    f"{os.path.basename(_output_file)}",
                    json_data.encode("utf_8", "backslashreplace"),
                )

        orig = "{0:cM}".format(Size(len(json_data)))
        comp = "{0:cM}".format(Size(os.path.getsize(f"{_output_file}.zip")))

        if spinner:
            spinner.stop()
        print(
            f"Saved {_output_file}.zip (size reduced from {orig} to {comp} in {tm.to_ms()}ms)"
        )
    except Exception as error:
        if spinner:
            spinner.stop()
        print(f"Error writing output file: {error}")
def print_header():
    """Print the YAWAST startup banner, version/platform details, and the
    current IPv4/IPv6 connection status to the console."""
    start_time = time.strftime("%Y-%m-%d %H:%M:%S %Z (%z)", time.localtime())

    # system memory, formatted for display
    vm = psutil.virtual_memory()
    mem_total = "{0:cM}".format(Size(vm.total))
    mem_avail = "{0:cM}".format(Size(vm.available))

    cpu_freq = psutil.cpu_freq()
    cpu_max = int(cpu_freq.max)
    if cpu_max == 0:
        # in this case, we don't have a real max, so go with current
        cpu_max = int(cpu_freq.current)

    banner = (
        r" .-. .- ",
        r" \ \ / / _ ",
        r" \ \ / / | | ",
        r" \ \ / / __ ___ ____ _ ___| |_ ",
        r" \ \ / / / _` \ \ /\ / / _` / __| __|",
        r" \ ` / | (_| |\ V V / (_| \__ \ |_ ",
        r" \ / \__,_| \_/\_/ \__,_|___/\__|",
        r" / / ",
        r" |`-' / ...where a pentest starts ",
        r" '..' ",
    )
    for row in banner:
        print(row)
    print()

    print(
        f"The YAWAST Antecedent Web Application Security Toolkit (v{get_version()})"
    )
    print(
        " Copyright (c) 2013 - 2020 Adam Caudill <*****@*****.**> and Contributors"
    )
    print(" Support & Documentation: https://yawast.org")
    print(" News & Updates: https://twitter.com/yawast")
    print(
        f" Python {''.join(sys.version.splitlines())} ({platform.python_implementation()})"
    )
    print(f" {ssl.OPENSSL_VERSION}")
    print(
        f" Platform: {platform.platform()} ({_get_locale()} / {sys.stdout.encoding})"
    )
    print(
        f" CPU(s): {psutil.cpu_count()}@{cpu_max}MHz - RAM: {mem_total} ({mem_avail} Available)"
    )
    output.print_color(Fore.CYAN, " " + _get_version_info())
    print()
    print(f" Started at {start_time}")
    print("")

    print("Connection Status:")
    print(f" {network.check_ipv4_connection()}")
    print(f" {network.check_ipv6_connection()}")
    print()
def save_output(spinner=None):
    """Serialize the collected scan data to a BZIP2-compressed zip archive.

    Writes ``<_output_file>.zip`` containing a single sorted-key JSON
    document built from the module-level ``_info``, ``_data`` and ``_issues``
    stores plus the ``Vulnerabilities`` catalog. If ``spinner`` is provided,
    it is paused around console output so messages print cleanly.

    :param spinner: optional spinner/progress indicator with ``start()`` and
        ``stop()`` methods; may be None.
    """
    global _issues, _info, _output_file, _data

    if spinner:
        spinner.stop()
    print("Saving...")
    if spinner:
        spinner.start()

    vulns = {
        vuln.name: {
            "severity": vuln.severity,
            "description": vuln.description,
            "id": vuln.id,
        }
        for vuln in Vulnerabilities
    }

    data = {
        "_info": _convert_keys(_info),
        "data": _convert_keys(_data),
        "issues": _convert_keys(_issues),
        "vulnerabilities": vulns,
    }
    json_data = json.dumps(data, sort_keys=True, indent=4)

    try:
        # "x" mode fails if the archive already exists, so we never clobber
        # a previous report. The context manager guarantees the zip handle
        # is closed even if writestr() raises (the original leaked it).
        with ExecutionTimer() as tm:
            with ZipFile(f"{_output_file}.zip", "x", zipfile.ZIP_BZIP2) as zf:
                zf.writestr(
                    f"{os.path.basename(_output_file)}",
                    json_data.encode("utf_8", "backslashreplace"),
                )

        orig = "{0:cM}".format(Size(len(json_data)))
        comp = "{0:cM}".format(Size(os.path.getsize(f"{_output_file}.zip")))

        if spinner:
            spinner.stop()
        print(
            f"Saved {_output_file}.zip (size reduced from {orig} to {comp} in {tm.to_ms()}ms)"
        )
    except Exception as error:
        if spinner:
            spinner.stop()
        print(f"Error writing output file: {error}")
def _get_info(self) -> str:
    """Build a one-line snapshot of process and system resource usage.

    Side effects: updates ``self.peak_mem_res`` when a new resident-memory
    high-water mark is seen, and emits a one-time low-memory warning via
    ``output.error`` once available RAM drops below ``WARNING_THRESHOLD``.

    :return: formatted stats string (CPU, memory, threads, connections,
        CPU frequency, live GC object count).
    """
    # prime the call to cpu_percent, as the first call doesn't return useful data
    self.process.cpu_percent()

    # force a collection; not ideal, but seems to help
    gc.collect(2)

    # use oneshot() to cache the data, so we minimize hits
    with self.process.oneshot():
        cpu_pct = self.process.cpu_percent()
        cpu_times = self.process.cpu_times()

        mem_info = self.process.memory_info()
        mem_res = "{0:cM}".format(Size(mem_info.rss))
        mem_virt = "{0:cM}".format(Size(mem_info.vms))

        # track the resident-memory high-water mark
        if mem_info.rss > self.peak_mem_res:
            self.peak_mem_res = mem_info.rss
            output.debug(f"New high-memory threshold: {self.peak_mem_res}")

        thread_count = self.process.num_threads()

    sys_mem = psutil.virtual_memory()
    mem_total = "{0:cM}".format(Size(sys_mem.total))
    mem_avail_bytes = sys_mem.available
    mem_avail = "{0:cM}".format(Size(sys_mem.available))

    # warn exactly once when available RAM falls below the threshold
    if mem_avail_bytes < self.WARNING_THRESHOLD and not self.low_mem_warning:
        self.low_mem_warning = True
        output.error(f"Low RAM Available: {mem_avail}")

    conn_count = -1
    try:
        conn_count = len(self.process.connections(kind="inet"))
    except Exception:
        # we don't care if this fails
        output.debug_exception()

    freq = psutil.cpu_freq()

    return (
        f"Process Stats: CPU: {cpu_pct}% - Sys: {cpu_times.system} - "
        f"User: {cpu_times.user} - Res: {mem_res} - Virt: {mem_virt} - "
        f"Available: {mem_avail}/{mem_total} - Threads: {thread_count} - "
        f"Connections: {conn_count} - CPU Freq: "
        f"{int(freq.current)}MHz/{int(freq.max)}MHz - "
        f"GC Objects: {len(gc.get_objects())}"
    )
def _shutdown():
    """Finalize a run: report elapsed time and peak memory, then save output.

    Idempotent — guarded by the module-level ``_has_shutdown`` flag so
    repeated calls (e.g. from multiple exit paths) do nothing.
    """
    global _start_time, _monitor, _has_shutdown

    # bail out if another exit path already ran the shutdown sequence
    if _has_shutdown:
        return
    _has_shutdown = True

    output.debug("Shutting down...")

    elapsed = datetime.now() - _start_time
    peak_bytes = _monitor.peak_mem_res
    mem_res = "{0:cM}".format(Size(peak_bytes))

    reporter.register_info("peak_memory", peak_bytes)

    output.empty()
    if peak_bytes > 0:
        output.norm(
            f"Completed (Elapsed: {str(elapsed)} - Peak Memory: {mem_res})")
    else:
        # if we don't have memory info - likely not running in a terminal, don't print junk
        output.norm(f"Completed (Elapsed: {str(elapsed)})")

    # persist results only when an output file was configured
    if reporter.get_output_file() != "":
        with Spinner() as spinner:
            reporter.save_output(spinner)
def _get_info(self) -> str:
    """Build a one-line snapshot of process and system resource usage.

    Side effect: emits a one-time low-memory warning via ``output.error``
    once available RAM drops below ``WARNING_THRESHOLD``.

    :return: formatted stats string (CPU, memory, threads, connections,
        CPU frequency).
    """
    from yawast.external.memory_size import Size

    # prime the call to cpu_percent, as the first call doesn't return useful data
    self.process.cpu_percent(interval=1)

    # use oneshot() to cache the data, so we minimize hits
    with self.process.oneshot():
        cpu_pct = self.process.cpu_percent()
        cpu_times = self.process.cpu_times()

        mem_info = self._get_mem()
        mem_res = "{0:cM}".format(Size(mem_info.rss))
        mem_virt = "{0:cM}".format(Size(mem_info.vms))

        thread_count = self.process.num_threads()

    sys_mem = psutil.virtual_memory()
    mem_total = "{0:cM}".format(Size(sys_mem.total))
    mem_avail_bytes = sys_mem.available
    mem_avail = "{0:cM}".format(Size(sys_mem.available))

    # warn exactly once when available RAM falls below the threshold
    if mem_avail_bytes < self.WARNING_THRESHOLD and not self.low_mem_warning:
        self.low_mem_warning = True
        output.error(f"Low RAM Available: {mem_avail}")

    conn_count = -1
    try:
        conn_count = len(self.process.connections(kind="inet"))
    except Exception:
        # we don't care if this fails
        output.debug_exception()

    freq = psutil.cpu_freq()

    return (
        f"Process Stats: CPU: {cpu_pct}% - Sys: {cpu_times.system} - "
        f"User: {cpu_times.user} - Res: {mem_res} - Virt: {mem_virt} - "
        f"Available: {mem_avail}/{mem_total} - Threads: {thread_count} - "
        f"Connections: {conn_count} - CPU Freq: "
        f"{int(freq.current)}MHz/{int(freq.max)}MHz"
    )
def _shutdown():
    """Finalize a run: report elapsed time and peak memory, then save output.

    Idempotent — guarded by the module-level ``_has_shutdown`` flag so
    repeated calls (e.g. from multiple exit paths) do nothing.
    """
    global _start_time, _monitor, _has_shutdown

    # bail out if another exit path already ran the shutdown sequence
    if _has_shutdown:
        return
    _has_shutdown = True

    output.debug("Shutting down...")

    elapsed = datetime.now() - _start_time
    mem_res = "{0:cM}".format(Size(_monitor.peak_mem_res))

    output.empty()
    output.norm(
        f"Completed (Elapsed: {str(elapsed)} - Peak Memory: {mem_res})")

    # persist results only when an output file was configured
    if reporter.get_output_file() != "":
        with Spinner():
            reporter.save_output()