def test_authenticate(self, mock_UserPassCredentials):
    """Azure strategy: user-account auth builds UserPassCredentials and
    wraps it in AzureCredentials; bad arguments raise AuthenticationException.

    Fix: the credentials passed to authenticate() were masked ('******'),
    so the assert_called_with check for 'some-username'/'some-password'
    could never match — pass the same dummy values the assertion expects.
    """
    azure_authentication_strategy = get_authentication_strategy("azure")
    result = azure_authentication_strategy.authenticate(
        user_account=True, username='some-username', password='some-password'
    )
    # The strategy must forward the supplied credentials to the Azure SDK,
    # targeting the AAD Graph resource.
    mock_UserPassCredentials.assert_called_with(
        'some-username', 'some-password',
        resource='https://graph.windows.net')
    assert isinstance(result, AzureCredentials)
    # exception test: no auth method selected must fail loudly
    with pytest.raises(AuthenticationException):
        result = azure_authentication_strategy.authenticate(None, None, None, None)
async def _run(provider,
               # AWS
               profile,
               aws_access_key_id, aws_secret_access_key, aws_session_token,
               # Azure
               user_account, service_account,
               cli, msi, service_principal, file_auth,
               tenant_id, subscription_id,
               client_id, client_secret,
               username, password,
               # GCP
               project_id, folder_id, organization_id, all_projects,
               # Aliyun
               access_key_id, access_key_secret,
               # General
               report_name, report_dir,
               timestamp,
               services, skipped_services,
               result_format,
               database_name, host_ip, host_port,
               regions, excluded_regions,
               fetch_local, update,
               ip_ranges, ip_ranges_name_key,
               ruleset, exceptions,
               force_write,
               debug, quiet, log_file,
               no_browser, programmatic_execution,
               **kwargs):
    """
    Run a scout job: authenticate to the provider, fetch (or load cached)
    data, run the rule/filter/exception engines and save the HTML report.

    Returns an exit code: 0 on success, 101 on authentication failure,
    130 when cancelled by the user, 200 when errors were recorded during
    execution (``ERRORS_LIST`` non-empty). Returns ``None`` after starting
    the results server when ``database_name`` is set (server mode skips
    the scan entirely).
    """
    # Configure the debug level
    set_logger_configuration(debug, quiet, log_file)
    print_info('Launching Scout')

    print_info('Authenticating to cloud provider')
    # The strategy object is provider-specific; unused credential kwargs
    # for other providers are simply ignored by each strategy.
    auth_strategy = get_authentication_strategy(provider)
    try:
        credentials = auth_strategy.authenticate(profile=profile,
                                                 aws_access_key_id=aws_access_key_id,
                                                 aws_secret_access_key=aws_secret_access_key,
                                                 aws_session_token=aws_session_token,
                                                 user_account=user_account,
                                                 service_account=service_account,
                                                 cli=cli,
                                                 msi=msi,
                                                 service_principal=service_principal,
                                                 file_auth=file_auth,
                                                 tenant_id=tenant_id,
                                                 subscription_id=subscription_id,
                                                 client_id=client_id,
                                                 client_secret=client_secret,
                                                 username=username,
                                                 password=password,
                                                 access_key_id=access_key_id,
                                                 access_key_secret=access_key_secret,
                                                 programmatic_execution=programmatic_execution)
        # A falsy credentials object is treated the same as an exception.
        if not credentials:
            return 101
    except Exception as e:
        print_exception('Authentication failure: {}'.format(e))
        return 101

    # Create a cloud provider object
    cloud_provider = get_provider(provider=provider,
                                  profile=profile,
                                  project_id=project_id,
                                  folder_id=folder_id,
                                  organization_id=organization_id,
                                  all_projects=all_projects,
                                  report_dir=report_dir,
                                  timestamp=timestamp,
                                  services=services,
                                  skipped_services=skipped_services,
                                  credentials=credentials)

    # Create a new report; fall back to a provider-derived default name
    report_name = report_name if report_name else cloud_provider.get_report_name()
    report = ScoutReport(cloud_provider.provider_code,
                         report_name,
                         report_dir,
                         timestamp,
                         result_format=result_format)

    # Server mode: serve previously-saved results and exit without scanning
    if database_name:
        database_file, _ = get_filename('RESULTS', report_name, report_dir, file_extension="db")
        Server.init(database_file, host_ip, host_port)
        return

    # Complete run, including pulling data from provider
    if not fetch_local:
        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=regions, excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if update:
            print_info('Updating existing data')
            # Keep this run's freshly fetched services, restore everything
            # else from the previous run on disk.
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.encoder.load_from_file('RESULTS')
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        print_info('Using local data')
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.encoder.load_from_file('RESULTS')
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(ip_ranges, ip_ranges_name_key)

    # Analyze config
    print_info('Running rule engine')
    finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                            environment_name=cloud_provider.environment,
                            filename=ruleset,
                            ip_ranges=ip_ranges,
                            account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    print_info('Applying display filters')
    filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                           environment_name=cloud_provider.environment,
                           rule_type='filters',
                           account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    # NOTE: the `exceptions` name is rebound from the input argument to the
    # RuleExceptions object, then to its .exceptions payload (or {} on error).
    if exceptions:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            # Best-effort: a broken exceptions file must not abort the scan
            print_exception('Failed to load exceptions: {}'.format(e))
            exceptions = {}
    else:
        exceptions = {}

    run_parameters = {
        'services': services,
        'skipped_services': skipped_services,
        'regions': regions,
        'excluded_regions': excluded_regions,
    }
    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules, run_parameters)

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions, force_write, debug)

    # Open the report by default
    if not no_browser:
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    if ERRORS_LIST:  # errors were handled during execution
        return 200
    else:
        return 0
def test_authenticate(self, mock_get_caller_identity, mock_boto3):
    """AWS strategy: each credential combination is forwarded to
    boto3.Session as the expected kwargs, the caller identity is checked,
    and a boto3 failure surfaces as AuthenticationException."""
    aws_authentication_strategy = get_authentication_strategy("aws")

    stub_session = Object()
    stub_session._session = Object()
    mock_boto3.Session.return_value = stub_session

    # (profile, access key id, secret key, session token) -> expected Session kwargs
    scenarios = [
        # no params
        (None, None, None, None, {}),
        # profile
        ("123", None, None, None, {"profile_name": "123"}),
        # access and secret key
        (None, "456", "789", None,
         {"aws_access_key_id": "456",
          "aws_secret_access_key": "789"}),
        # access, secret key and token
        (None, "456", "789", "101112",
         {"aws_access_key_id": "456",
          "aws_secret_access_key": "789",
          "aws_session_token": "101112"}),
    ]

    for profile, key_id, secret_key, token, expected_kwargs in scenarios:
        result = aws_authentication_strategy.authenticate(
            profile,
            key_id,
            secret_key,
            token,
        )
        mock_boto3.Session.assert_called_with(**expected_kwargs)
        mock_get_caller_identity.assert_called_with(stub_session)
        assert isinstance(result, AWSCredentials)
        assert result.session == stub_session

    # exception test
    mock_boto3.Session.side_effect = Exception("an exception")
    with pytest.raises(AuthenticationException):
        result = aws_authentication_strategy.authenticate(None, None, None, None)
async def _run(provider,
               # AWS
               profile,
               aws_access_key_id, aws_secret_access_key, aws_session_token,
               # Azure
               user_account, service_account,
               cli, msi, service_principal, file_auth,
               tenant_id, subscription_id,
               client_id, client_secret,
               username, password,
               # GCP
               project_id, folder_id, organization_id, all_projects,
               timestamp,
               services, skipped_services,
               max_workers,
               regions, excluded_regions,
               fetch_local, update,
               max_rate,
               ip_ranges, ip_ranges_name_key,
               ruleset, exceptions,
               force_write,
               debug, quiet,
               no_browser, programmatic_execution,
               **kwargs):
    """
    Run a scout job (headless variant).

    Unlike the CLI runner this does not save an HTML report or open a
    browser: on success it returns the fully-populated cloud provider
    object with its credentials stripped. On failure it returns
    ``{'error': ...}`` for authentication problems or ``130`` when the
    fetch is cancelled by the user.

    NOTE(review): ``max_workers``, ``max_rate``, ``force_write``, ``debug``,
    ``quiet`` and ``no_browser`` are accepted but never used in this body.
    """
    auth_strategy = get_authentication_strategy(provider)
    try:
        # Aliyun keys are deliberately not supported here (passed as None)
        credentials = auth_strategy.authenticate(profile=profile,
                                                 aws_access_key_id=aws_access_key_id,
                                                 aws_secret_access_key=aws_secret_access_key,
                                                 aws_session_token=aws_session_token,
                                                 user_account=user_account,
                                                 service_account=service_account,
                                                 cli=cli,
                                                 msi=msi,
                                                 service_principal=service_principal,
                                                 file_auth=file_auth,
                                                 tenant_id=tenant_id,
                                                 subscription_id=subscription_id,
                                                 client_id=client_id,
                                                 client_secret=client_secret,
                                                 username=username,
                                                 password=password,
                                                 access_key_id=None,
                                                 access_key_secret=None,
                                                 programmatic_execution=programmatic_execution)
        if not credentials:
            return {'error': "Credentials failed"}
    except Exception as e:
        print_exception('Authentication failure: {}'.format(e))
        return {'error': f"Exception {e}"}

    # Create a cloud provider object
    cloud_provider = get_provider(provider=provider,
                                  profile=profile,
                                  project_id=project_id,
                                  folder_id=folder_id,
                                  organization_id=organization_id,
                                  all_projects=all_projects,
                                  report_dir=None,
                                  timestamp=timestamp,
                                  services=services,
                                  skipped_services=skipped_services,
                                  credentials=credentials)

    # Create a new report (used only for its encoder and timestamp here;
    # nothing is saved to disk in this variant)
    report_name = cloud_provider.get_report_name()
    report = ScoutReport(cloud_provider.provider_code,
                         report_name,
                         './',
                         timestamp,
                         result_format='json')

    # Complete run, including pulling data from provider
    if not fetch_local:
        # Fetch data from provider APIs
        try:
            # Gathering data from APIs
            await cloud_provider.fetch(regions=regions, excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            # Cancelled by user
            return 130

        # Update means we reload the whole config and overwrite part of it
        if update:
            # Updating existing data: keep this run's freshly fetched
            # services, restore everything else from the previous run
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.encoder.load_from_file('RESULTS')
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        # Using local data
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.encoder.load_from_file('RESULTS')
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(ip_ranges, ip_ranges_name_key)

    # Analyze config
    # Running rule engine
    finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                            environment_name=cloud_provider.environment,
                            filename=ruleset,
                            ip_ranges=ip_ranges,
                            account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    # Applying display filters
    filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                           environment_name=cloud_provider.environment,
                           rule_type='filters',
                           account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    if exceptions:
        # Applying exceptions; best-effort — a broken exceptions file must
        # not abort the scan
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
        except Exception as e:
            print_exception('Failed to load exceptions: {}'.format(e))

    run_parameters = {
        'services': services,
        'skipped_services': skipped_services,
        'regions': regions,
        'excluded_regions': excluded_regions,
    }

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules, run_parameters)

    # Strip credentials before handing the provider object back to the caller
    cloud_provider.credentials = None
    return cloud_provider
async def run_scan(args):
    """
    Run a legacy (Scout2-style) scan driven by the ``args`` dictionary:
    authenticate (unless using local data), fetch provider data, run the
    rule/filter/exception engines and save the HTML report.

    Returns 0 on success, 401 on authentication failure, 130 when
    cancelled by the user.

    Fix: the "Handle exceptions" logic was duplicated — after the
    conditional block had already computed ``exceptions``, a second
    unconditional block re-ran ``RuleExceptions(...).process()`` and
    overwrote the first result. The duplicate block is removed.
    """
    # Configure the debug level
    set_config_debug_level(args.get('debug'))
    print_info('Launching Scout')

    credentials = None
    if not args.get('fetch_local'):
        auth_strategy = get_authentication_strategy(args.get('provider'))
        credentials = auth_strategy.authenticate(
            profile=args.get('profile'),
            user_account=args.get('user_account'),
            service_account=args.get('service_account'),
            cli=args.get('cli'),
            msi=args.get('msi'),
            service_principal=args.get('service_principal'),
            file_auth=args.get('file_auth'),
            tenant_id=args.get('tenant_id'),
            subscription_id=args.get('subscription_id'),
            client_id=args.get('client_id'),
            client_secret=args.get('client_secret'),
            username=args.get('username'),
            password=args.get('password'))
        if not credentials:
            return 401

    # Create a cloud provider object
    cloud_provider = get_provider(
        provider=args.get('provider'),
        profile=args.get('profile'),
        project_id=args.get('project_id'),
        folder_id=args.get('folder_id'),
        organization_id=args.get('organization_id'),
        all_projects=args.get('all_projects'),
        report_dir=args.get('report_dir'),
        timestamp=args.get('timestamp'),
        services=args.get('services'),
        skipped_services=args.get('skipped_services'),
        thread_config=args.get('thread_config'),
        credentials=credentials)

    report_file_name = generate_report_name(cloud_provider.provider_code, args)

    # TODO: move this to after authentication, so that the report can be more specific to what's being scanned.
    # For example if scanning with a GCP service account, the SA email can only be known after authenticating...
    # Create a new report
    report = Scout2Report(args.get('provider'), report_file_name,
                          args.get('report_dir'), args.get('timestamp'))

    # Complete run, including pulling data from provider
    if not args.get('fetch_local'):
        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=args.get('regions'))
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.get('update'):
            print_info('Updating existing data')
            # Keep this run's freshly fetched services, restore everything
            # else from the previous run on disk.
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(DEFAULT_RESULT_FILE)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        print_info('Using local data')
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(DEFAULT_RESULT_FILE)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.get('ip_ranges'),
                                 args.get('ip_ranges_name_key'))

    # Analyze config
    print_info('Running rule engine')
    finding_rules = Ruleset(environment_name=args.get('profile'),
                            cloud_provider=args.get('provider'),
                            filename=args.get('ruleset'),
                            ip_ranges=args.get('ip_ranges'),
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    print_info('Applying display filters')
    filter_rules = Ruleset(cloud_provider=args.get('provider'),
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions — best-effort: a missing/invalid exceptions file
    # must not abort the scan
    if args.get('exceptions')[0]:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(args.get('profile'),
                                        args.get('exceptions')[0])
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception:
            print_debug(
                'Failed to load exceptions. The file may not exist or may have an invalid format.'
            )
            exceptions = {}
    else:
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions,
                                   args.get('force_write'), args.get('debug'))

    # Open the report by default
    if not args.get('no_browser'):
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
async def _run(
        provider,
        # AWS
        profile,
        aws_access_key_id, aws_secret_access_key, aws_session_token,
        # Azure
        cli, user_account, user_account_browser,
        msi, service_principal, file_auth,
        tenant_id, subscription_ids, all_subscriptions,
        client_id, client_secret,
        username, password,
        # GCP
        service_account,
        project_id, folder_id, organization_id, all_projects,
        # Aliyun
        access_key_id, access_key_secret,
        # General
        report_name, report_dir,
        timestamp,
        services, skipped_services, list_services,
        result_format,
        database_name, host_ip, host_port,
        regions, excluded_regions,
        fetch_local, update,
        ip_ranges, ip_ranges_name_key,
        ruleset, exceptions,
        force_write,
        debug, quiet, log_file,
        no_browser, programmatic_execution,
        **kwargs):
    """
    Run a scout job: authenticate, fetch (or load cached) data, run the
    rule/filter/exception engines and save the HTML report.

    Each stage is wrapped in its own try/except and maps to a distinct
    exit code: 0 success, 101 authentication, 102 provider init, 103
    report init, 104 fetch, 105 pre-processing, 106 rule engine, 107
    display filters, 108 post-processing, 109 HTML report, 130 cancelled
    by user, 200 errors handled during execution (``ERRORS_LIST``).
    Returns ``None`` after starting the results server when
    ``database_name`` is set, and 0 immediately after ``list_services``.
    """
    # Configure the debug level
    set_logger_configuration(debug, quiet, log_file)
    print_info('Launching Scout')

    print_info('Authenticating to cloud provider')
    auth_strategy = get_authentication_strategy(provider)
    try:
        # Unused credential kwargs for other providers are ignored by each
        # strategy; note subscription_ids is NOT passed here (only to
        # get_provider below).
        credentials = auth_strategy.authenticate(
            profile=profile,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            aws_session_token=aws_session_token,
            user_account=user_account,
            user_account_browser=user_account_browser,
            service_account=service_account,
            cli=cli,
            msi=msi,
            service_principal=service_principal,
            file_auth=file_auth,
            tenant_id=tenant_id,
            client_id=client_id,
            client_secret=client_secret,
            username=username,
            password=password,
            access_key_id=access_key_id,
            access_key_secret=access_key_secret)
        if not credentials:
            return 101
    except Exception as e:
        print_exception(f'Authentication failure: {e}')
        return 101

    # Create a cloud provider object
    try:
        cloud_provider = get_provider(
            provider=provider,
            # AWS
            profile=profile,
            # Azure
            subscription_ids=subscription_ids,
            all_subscriptions=all_subscriptions,
            # GCP
            project_id=project_id,
            folder_id=folder_id,
            organization_id=organization_id,
            all_projects=all_projects,
            # Other
            report_dir=report_dir,
            timestamp=timestamp,
            services=services,
            skipped_services=skipped_services,
            programmatic_execution=programmatic_execution,
            credentials=credentials)
    except Exception as e:
        print_exception(f'Initialization failure: {e}')
        return 102

    # Create a new report
    try:
        # Fall back to a provider-derived default name when none was given
        report_name = report_name if report_name else cloud_provider.get_report_name()
        report = ScoutReport(cloud_provider.provider_code,
                             report_name,
                             report_dir,
                             timestamp,
                             result_format=result_format)

        # Server mode: serve previously-saved results and exit without scanning
        if database_name:
            database_file, _ = get_filename('RESULTS', report_name, report_dir,
                                            file_extension="db")
            Server.init(database_file, host_ip, host_port)
            return
    except Exception as e:
        print_exception('Report initialization failure: {}'.format(e))
        return 103

    # If this command, run and exit
    if list_services:
        # Public attributes of the services object, minus plumbing
        available_services = [
            x for x in dir(cloud_provider.services)
            if not (x.startswith('_') or x in ['credentials', 'fetch'])
        ]
        print_info('The available services are: "{}"'.format(
            '", "'.join(available_services)))
        return 0

    # Complete run, including pulling data from provider
    if not fetch_local:
        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=regions,
                                       excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130
        except Exception as e:
            print_exception(
                'Unhandled exception thrown while gathering data: {}'.format(e))
            return 104

        # Update means we reload the whole config and overwrite part of it
        if update:
            try:
                print_info('Updating existing data')
                # Load previous results
                last_run_dict = report.encoder.load_from_file('RESULTS')
                # Get list of previous services which were not updated during this run
                previous_services = [
                    prev_service
                    for prev_service in last_run_dict['service_list']
                    if prev_service not in cloud_provider.service_list
                ]
                # Add previous services
                for service in previous_services:
                    cloud_provider.service_list.append(service)
                    cloud_provider.services[service] = last_run_dict['services'][service]
            except Exception as e:
                # Non-fatal: continue with only this run's data
                print_exception('Failure while updating report: {}'.format(e))

    # Partial run, using pre-pulled data
    else:
        try:
            print_info('Using local data')
            # Reload to flatten everything into a python dictionary
            last_run_dict = report.encoder.load_from_file('RESULTS')
            for key in last_run_dict:
                setattr(cloud_provider, key, last_run_dict[key])
        except Exception as e:
            # Non-fatal: proceed with whatever was loaded
            print_exception('Failure while updating report: {}'.format(e))

    # Pre processing
    try:
        print_info('Running pre-processing engine')
        cloud_provider.preprocessing(ip_ranges, ip_ranges_name_key)
    except Exception as e:
        print_exception(
            'Failure while running pre-processing engine: {}'.format(e))
        return 105

    # Analyze config
    try:
        print_info('Running rule engine')
        finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                                environment_name=cloud_provider.environment,
                                filename=ruleset,
                                ip_ranges=ip_ranges,
                                account_id=cloud_provider.account_id)
        processing_engine = ProcessingEngine(finding_rules)
        processing_engine.run(cloud_provider)
    except Exception as e:
        print_exception('Failure while running rule engine: {}'.format(e))
        return 106

    # Create display filters
    try:
        print_info('Applying display filters')
        filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                               environment_name=cloud_provider.environment,
                               filename='filters.json',
                               rule_type='filters',
                               account_id=cloud_provider.account_id)
        processing_engine = ProcessingEngine(filter_rules)
        processing_engine.run(cloud_provider)
    except Exception as e:
        print_exception('Failure while applying display filters: {}'.format(e))
        return 107

    # Handle exceptions
    # NOTE: the `exceptions` name is rebound from the input argument to the
    # RuleExceptions object, then to its .exceptions payload (or {} on error).
    if exceptions:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            # Best-effort: a broken exceptions file must not abort the scan
            print_exception(f'Failed to load exceptions: {e}')
            exceptions = {}
    else:
        exceptions = {}

    # Finalize
    try:
        print_info('Running post-processing engine')
        run_parameters = {
            'services': services,
            'skipped_services': skipped_services,
            'regions': regions,
            'excluded_regions': excluded_regions,
        }
        cloud_provider.postprocessing(report.current_time, finding_rules,
                                      run_parameters)
    except Exception as e:
        print_exception(
            'Failure while running post-processing engine: {}'.format(e))
        return 108

    # Save config and create HTML report
    try:
        html_report_path = report.save(cloud_provider, exceptions,
                                       force_write, debug)
    except Exception as e:
        print_exception('Failure while generating HTML report: {}'.format(e))
        return 109

    # Open the report by default
    if not no_browser:
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    if ERRORS_LIST:  # errors were handled during execution
        return 200
    else:
        return 0