def test_process_urls_maybe_valid(self):
    """A plausible bare hostname should pass URL processing without stderr output."""
    parser = command_line.build_parser()
    _, urls = parser.parse_known_args(["scan", "adamcaudill.com"])

    with utils.capture_sys_output() as (_, stderr):
        command_line.process_urls(urls)
        self.assertEqual("", stderr.getvalue())
def test_process_urls_unknown_param(self):
    """An unrecognized option leftover from parsing should be reported on stderr."""
    parser = command_line.build_parser()
    _, urls = parser.parse_known_args(["scan", "--dfghjk"])

    with utils.capture_sys_output() as (_, stderr):
        command_line.process_urls(urls)
        self.assertIn("YAWAST Error: Invalid parameter", stderr.getvalue())
def test_process_urls_invalid_ftp(self):
    """A non-HTTP scheme (ftp://) should abort with SystemExit and an error message."""
    parser = command_line.build_parser()
    _, urls = parser.parse_known_args(["scan", "ftp://adamcaudill.com"])

    # The error is written before the exit, so check stderr after the
    # SystemExit has been caught by assertRaises.
    with self.assertRaises(SystemExit):
        with utils.capture_sys_output() as (_, stderr):
            command_line.process_urls(urls)

    self.assertIn("YAWAST Error: Invalid URL Specified", stderr.getvalue())
def test_process_urls_empty(self):
    """Calling scan with no URLs at all should abort with SystemExit and an error."""
    parser = command_line.build_parser()
    _, urls = parser.parse_known_args(["scan"])

    # The error is written before the exit, so check stderr after the
    # SystemExit has been caught by assertRaises.
    with self.assertRaises(SystemExit):
        with utils.capture_sys_output() as (_, stderr):
            command_line.process_urls(urls)

    self.assertIn("YAWAST Error: You must specify at least one URL.", stderr.getvalue())
def main():
    """Application entry point.

    Installs the SIGINT handler, forces stdout to UTF-8 (best effort),
    parses the command line, configures output and network state, resolves
    the target URLs, and dispatches to the selected sub-command under the
    key/process monitors.
    """
    global _start_time, _monitor

    signal.signal(signal.SIGINT, signal_handler)
    warnings.simplefilter("ignore")

    # Best effort: switch stdout to UTF-8 if it isn't already; failures
    # (e.g. streams without reconfigure) are reported but not fatal.
    try:
        if str(sys.stdout.encoding).lower() != "utf-8":
            print(f"Output encoding is {sys.stdout.encoding}: changing to UTF-8")
            sys.stdout.reconfigure(encoding="utf-8")
    except Exception as error:
        print(f"Unable to set UTF-8 encoding: {str(error)}")

    parser = command_line.build_parser()
    args, urls = parser.parse_known_args()

    # setup the output system
    output.setup(args.debug, args.nocolors, args.nowrap)
    output.debug("Starting application...")

    # These options may be absent from the namespace depending on the
    # sub-command; fall back to None when not present.
    network.init(
        getattr(args, "proxy", None),
        getattr(args, "cookie", None),
        getattr(args, "header", None),
    )

    # if we made it this far, it means that the parsing worked.
    # "version" is the only command that takes no URLs, so it is special-cased.
    urls = command_line.process_urls(urls) if args.command != "version" else []

    # we are good to keep going
    print_header()

    if args.output is not None:
        reporter.init(args.output)
        _set_basic_info()

        print(f"Saving output to '{reporter.get_output_file()}'")
        print()

    try:
        with _KeyMonitor(), _ProcessMonitor() as pm:
            _monitor = pm
            args.func(args, urls)
    except KeyboardInterrupt:
        output.empty()
        output.error("Scan cancelled by user.")
    finally:
        _shutdown()
def main():
    """Application entry point (older variant).

    Installs the SIGINT handler, parses the command line, configures output
    and network state, resolves the target URLs, and dispatches to the
    selected sub-command under the key/process monitors.
    """
    global _start_time, _monitor

    signal.signal(signal.SIGINT, signal_handler)

    parser = command_line.build_parser()
    args, urls = parser.parse_known_args()

    # setup the output system
    output.setup(args.debug, args.nocolors)
    output.debug("Starting application...")

    network.init(args.proxy, args.cookie)

    # if we made it this far, it means that the parsing worked.
    urls = command_line.process_urls(urls)

    # we are good to keep going
    print_header()

    if args.output is not None:
        reporter.init(args.output)
        _set_basic_info()

        print(f"Saving output to '{reporter.get_output_file()}'")
        print()

    try:
        with _KeyMonitor(), _ProcessMonitor() as pm:
            _monitor = pm
            args.func(args, urls)
    except KeyboardInterrupt:
        output.empty()
        output.error("Scan cancelled by user.")
    finally:
        _shutdown()