def from_names_to_scan_commands(csv_command_names: str) -> list:
    # TODO: it would be nice to validate this config at startup rather than on the scanner's
    # first run, but doing that currently creates a circular dependency; revisit later.

    # Sanity check for when the number of scan commands changes
    plugins_repository = PluginsRepository()
    commands = plugins_repository.get_available_commands()
    if len(commands) != len(KNOWN_SCAN_COMMANDS):
        logger.warning(
            "SL0004 The number of SSLyze plugins does not match the number of hardcoded scan commands."
        )

    if csv_command_names == "DONT_LIMIT":
        return list(KNOWN_SCAN_COMMANDS.values())

    selected_commands = []
    command_names = {name.strip() for name in csv_command_names.split(",")}
    for cmd_name in command_names:
        if cmd_name in KNOWN_SCAN_COMMANDS:
            selected_commands.append(KNOWN_SCAN_COMMANDS[cmd_name])
        else:
            logger.warning("SL0005 Unknown SSLyze scan command: %s", cmd_name)
    return selected_commands
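# Usage sketch for from_names_to_scan_commands(). The command names "certinfo" and
# "heartbleed" are assumed keys of KNOWN_SCAN_COMMANDS and are only illustrative; the
# real configuration may use different names.
selected = from_names_to_scan_commands("certinfo, heartbleed")
for command_class in selected:
    logger.info("Will run scan command: %s", command_class)

# "DONT_LIMIT" is the sentinel meaning "run every known scan command".
all_commands = from_names_to_scan_commands("DONT_LIMIT")
assert set(all_commands) == set(KNOWN_SCAN_COMMANDS.values())

# Unknown names are skipped with an SL0005 warning rather than raising.
only_known = from_names_to_scan_commands("certinfo, not_a_command")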
class SynchronousScanner:
    """An object to run SSL scanning commands synchronously against a server."""

    # Controls every socket connection done by every plugin
    DEFAULT_NETWORK_RETRIES = 3
    DEFAULT_NETWORK_TIMEOUT = 5  # in seconds

    def __init__(
        self, network_retries: int = DEFAULT_NETWORK_RETRIES, network_timeout: int = DEFAULT_NETWORK_TIMEOUT
    ) -> None:
        """Create a scanner for running scanning commands synchronously.

        Args:
            network_retries: How many times SSLyze should retry a connection that timed out.
            network_timeout: The time until an ongoing connection times out.
        """
        self._plugins_repository = PluginsRepository()

        # Set global network settings
        SslConnection.set_global_network_settings(network_retries, network_timeout)

    def run_scan_command(
        self, server_info: ServerConnectivityInfo, scan_command: PluginScanCommand
    ) -> PluginScanResult:
        """Run a single scan command against a server; will block until the scan command has been completed.

        Args:
            server_info: The server's connectivity information. The test_connectivity_to_server() method must have
                been called first to ensure that the server is online and accessible.
            scan_command: The scan command to run against this server.

        Returns:
            The result of the scan command, which will be an instance of the scan command's corresponding
            PluginScanResult subclass.
        """
        plugin_class = self._plugins_repository.get_plugin_class_for_command(scan_command)
        plugin = plugin_class()
        return plugin.process_task(server_info, scan_command)
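# Usage sketch for SynchronousScanner. The ServerConnectivityInfo constructor arguments, the
# CertificateInfoScanCommand class and result.as_text() are assumptions based on the SSLyze 1.x
# plugin API and may need to be adjusted for the exact SSLyze version in use.
server_info = ServerConnectivityInfo(hostname="www.example.com", port=443)
server_info.test_connectivity_to_server()  # required before scanning, per run_scan_command()

scanner = SynchronousScanner(network_retries=2, network_timeout=10)
scan_result = scanner.run_scan_command(server_info, CertificateInfoScanCommand())
for text_line in scan_result.as_text():
    print(text_line)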
class SynchronousScanner(object):
    """An object to run SSL scanning commands synchronously against a server."""

    # Controls every socket connection done by every plugin
    DEFAULT_NETWORK_RETRIES = 3
    DEFAULT_NETWORK_TIMEOUT = 5  # in seconds

    def __init__(self, network_retries=DEFAULT_NETWORK_RETRIES, network_timeout=DEFAULT_NETWORK_TIMEOUT):
        # type: (int, int) -> None
        """Create a scanner for running scanning commands synchronously.

        Args:
            network_retries (Optional[int]): How many times SSLyze should retry a connection that timed out.
            network_timeout (Optional[int]): The time until an ongoing connection times out.
        """
        self._plugins_repository = PluginsRepository()

        # Set global network settings
        SSLConnection.set_global_network_settings(network_retries, network_timeout)

    def run_scan_command(self, server_info, scan_command):
        # type: (ServerConnectivityInfo, PluginScanCommand) -> PluginScanResult
        """Run a single scan command against a server; will block until the scan command has been completed.

        Args:
            server_info (ServerConnectivityInfo): The server's connectivity information. The
                test_connectivity_to_server() method must have been called first to ensure that the server is
                online and accessible.
            scan_command (PluginScanCommand): The scan command to run against this server.

        Returns:
            PluginScanResult: The result of the scan command, which will be an instance of the scan command's
            corresponding PluginScanResult subclass.
        """
        plugin_class = self._plugins_repository.get_plugin_class_for_command(scan_command)
        plugin = plugin_class()
        return plugin.process_task(server_info, scan_command)
def main() -> None:
    global global_scanner

    # For py2exe builds
    freeze_support()

    # Handle SIGINT to terminate processes
    signal.signal(signal.SIGINT, sigint_handler)

    start_time = time()
    plugins_repository = PluginsRepository()
    available_plugins = plugins_repository.get_available_plugins()
    available_commands = plugins_repository.get_available_commands()

    # Create the command line parser and the list of available options
    sslyze_parser = CommandLineParser(available_plugins, __version__)
    try:
        good_server_list, malformed_server_list, args_command_list = sslyze_parser.parse_command_line()
    except CommandLineParsingError as e:
        print(e.get_error_msg())
        return

    output_hub = OutputHub()
    output_hub.command_line_parsed(available_plugins, args_command_list, malformed_server_list)

    # Initialize the pool of processes that will run each plugin
    if args_command_list.https_tunnel or args_command_list.slow_connection:
        # Maximum one process to not kill the proxy or the connection
        global_scanner = ConcurrentScanner(max_processes_nb=1)
    else:
        global_scanner = ConcurrentScanner()

    # Figure out which hosts are up and fill the task queue with work to do
    connectivity_tester = ConcurrentServerConnectivityTester(good_server_list)
    connectivity_tester.start_connectivity_testing()

    # Store and print servers we were able to connect to
    online_servers_list = []
    for server_connectivity_info in connectivity_tester.get_reachable_servers():
        online_servers_list.append(server_connectivity_info)
        output_hub.server_connectivity_test_succeeded(server_connectivity_info)

        # Send tasks to worker processes
        for scan_command_class in available_commands:
            if getattr(args_command_list, scan_command_class.get_cli_argument()):
                # Get this command's optional arguments if there are any
                optional_args = {}
                for optional_arg_name in scan_command_class.get_optional_arguments():
                    # Was this option set?
                    if getattr(args_command_list, optional_arg_name):
                        optional_args[optional_arg_name] = getattr(args_command_list, optional_arg_name)
                scan_command = scan_command_class(**optional_args)  # type: ignore
                global_scanner.queue_scan_command(server_connectivity_info, scan_command)

    # Store and print servers we were NOT able to connect to
    for connectivity_exception in connectivity_tester.get_invalid_servers():
        output_hub.server_connectivity_test_failed(connectivity_exception)

    # Keep track of how many tasks have to be performed for each target
    task_num = 0
    output_hub.scans_started()
    for scan_command_class in available_commands:
        if getattr(args_command_list, scan_command_class.get_cli_argument()):
            task_num += 1

    # Each host has a list of results
    result_dict: Dict[Text, List[PluginScanResult]] = {}
    # We cannot use the server_info object directly as its address will change due to multiprocessing
    RESULT_KEY_FORMAT = '{hostname}:{ip_address}:{port}'
    for server_info in online_servers_list:
        result_dict[RESULT_KEY_FORMAT.format(hostname=server_info.hostname, ip_address=server_info.ip_address,
                                             port=server_info.port)] = []

    # Process the results as they come
    for plugin_result in global_scanner.get_results():
        server_info = plugin_result.server_info
        result_key = RESULT_KEY_FORMAT.format(hostname=server_info.hostname, ip_address=server_info.ip_address,
                                              port=server_info.port)
        result_dict[result_key].append(plugin_result)

        plugin_result_list = result_dict[result_key]
        if len(plugin_result_list) == task_num:
            # Done with this server; send the result to the output hub
            output_hub.server_scan_completed(CompletedServerScan(server_info, plugin_result_list))

    # All done
    exec_time = time() - start_time
    output_hub.scans_completed(exec_time)
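# Entry-point sketch. The module path (e.g. a sslyze/__main__.py) is an assumption; the guard
# simply dispatches to main(), which already calls freeze_support() for frozen builds and
# installs the SIGINT handler.
if __name__ == "__main__":
    main()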
def main():
    global global_scanner

    # For py2exe builds
    freeze_support()

    # Handle SIGINT to terminate processes
    signal.signal(signal.SIGINT, sigint_handler)

    start_time = time()
    plugins_repository = PluginsRepository()
    available_plugins = plugins_repository.get_available_plugins()
    available_commands = plugins_repository.get_available_commands()

    # Create the command line parser and the list of available options
    sslyze_parser = CommandLineParser(available_plugins, __version__)
    try:
        good_server_list, bad_server_list, args_command_list = sslyze_parser.parse_command_line()
    except CommandLineParsingError as e:
        print(e.get_error_msg())
        return

    output_hub = OutputHub()
    output_hub.command_line_parsed(available_plugins, args_command_list)

    # Initialize the pool of processes that will run each plugin
    if args_command_list.https_tunnel:
        # Maximum one process to not kill the proxy
        global_scanner = ConcurrentScanner(args_command_list.nb_retries, args_command_list.timeout,
                                           max_processes_nb=1)
    else:
        global_scanner = ConcurrentScanner(args_command_list.nb_retries, args_command_list.timeout)

    # Figure out which hosts are up and fill the task queue with work to do
    connectivity_tester = ServersConnectivityTester(good_server_list)
    connectivity_tester.start_connectivity_testing(network_timeout=args_command_list.timeout)

    # Store and print servers whose command line string was bad
    for failed_scan in bad_server_list:
        output_hub.server_connectivity_test_failed(failed_scan)

    # Store and print servers we were able to connect to
    online_servers_list = []
    for server_connectivity_info in connectivity_tester.get_reachable_servers():
        online_servers_list.append(server_connectivity_info)
        output_hub.server_connectivity_test_succeeded(server_connectivity_info)

        # Send tasks to worker processes
        for scan_command_class in available_commands:
            if getattr(args_command_list, scan_command_class.get_cli_argument()):
                # Get this command's optional arguments if there are any
                optional_args = {}
                for optional_arg_name in scan_command_class.get_optional_arguments():
                    # Was this option set?
                    if getattr(args_command_list, optional_arg_name):
                        optional_args[optional_arg_name] = getattr(args_command_list, optional_arg_name)
                scan_command = scan_command_class(**optional_args)
                global_scanner.queue_scan_command(server_connectivity_info, scan_command)

    # Store and print servers we were NOT able to connect to
    for tentative_server_info, exception in connectivity_tester.get_invalid_servers():
        failed_scan = FailedServerScan(tentative_server_info.server_string, exception)
        output_hub.server_connectivity_test_failed(failed_scan)

    # Keep track of how many tasks have to be performed for each target
    task_num = 0
    output_hub.scans_started()
    for scan_command_class in available_commands:
        if getattr(args_command_list, scan_command_class.get_cli_argument()):
            task_num += 1

    # Each host has a list of results
    result_dict = {}
    # We cannot use the server_info object directly as its address will change due to multiprocessing
    RESULT_KEY_FORMAT = '{hostname}:{ip_address}:{port}'
    for server_info in online_servers_list:
        result_dict[RESULT_KEY_FORMAT.format(hostname=server_info.hostname, ip_address=server_info.ip_address,
                                             port=server_info.port)] = []

    # Process the results as they come
    for plugin_result in global_scanner.get_results():
        server_info = plugin_result.server_info
        result_dict[RESULT_KEY_FORMAT.format(hostname=server_info.hostname, ip_address=server_info.ip_address,
                                             port=server_info.port)].append(plugin_result)

        plugin_result_list = result_dict[RESULT_KEY_FORMAT.format(hostname=server_info.hostname,
                                                                  ip_address=server_info.ip_address,
                                                                  port=server_info.port)]
        if len(plugin_result_list) == task_num:
            # Done with this server; send the result to the output hub
            output_hub.server_scan_completed(CompletedServerScan(server_info, plugin_result_list))

    # All done
    exec_time = time() - start_time
    output_hub.scans_completed(exec_time)