def run_from_cli():
    """Command-line entry point: parse the CLI arguments and launch a scan.

    :return: the exit status returned by ``run``, or 130 when interrupted
    """
    parser = ScoutSuiteArgumentParser()
    args = parser.parse_args()

    # Get the dictionary to get None instead of a crash on missing keys
    args = args.__dict__

    try:
        # Pass every option by keyword: with ~40 parameters a positional call
        # silently breaks as soon as run()'s signature is reordered.
        return run(provider=args.get('provider'),
                   # AWS
                   profile=args.get('profile'),
                   # Azure
                   user_account=args.get('user_account'),
                   service_account=args.get('service_account'),
                   cli=args.get('cli'),
                   msi=args.get('msi'),
                   service_principal=args.get('service_principal'),
                   file_auth=args.get('file_auth'),
                   tenant_id=args.get('tenant_id'),
                   subscription_id=args.get('subscription_id'),
                   client_id=args.get('client_id'),
                   client_secret=args.get('client_secret'),
                   username=args.get('username'),
                   password=args.get('password'),
                   # GCP
                   project_id=args.get('project_id'),
                   folder_id=args.get('folder_id'),
                   organization_id=args.get('organization_id'),
                   all_projects=args.get('all_projects'),
                   # General
                   report_name=args.get('report_name'),
                   report_dir=args.get('report_dir'),
                   timestamp=args.get('timestamp'),
                   services=args.get('services'),
                   skipped_services=args.get('skipped_services'),
                   result_format=args.get('result_format'),
                   database_name=args.get('database_name'),
                   host_ip=args.get('host_ip'),
                   host_port=args.get('host_port'),
                   max_workers=args.get('max_workers'),
                   regions=args.get('regions'),
                   fetch_local=args.get('fetch_local'),
                   update=args.get('update'),
                   ip_ranges=args.get('ip_ranges'),
                   ip_ranges_name_key=args.get('ip_ranges_name_key'),
                   ruleset=args.get('ruleset'),
                   exceptions=args.get('exceptions'),
                   force_write=args.get('force_write'),
                   debug=args.get('debug'),
                   quiet=args.get('quiet'),
                   log_file=args.get('log_file'),
                   no_browser=args.get('no_browser'))
    except (KeyboardInterrupt, SystemExit):
        # Graceful exit on Ctrl-C; 130 is the conventional SIGINT status.
        print_info('Exiting')
        return 130
def run_from_cli():
    """Command-line entry point: parse the CLI arguments and launch a scan.

    :return: the exit status returned by ``run``, or 130 when interrupted
    """
    parser = ScoutSuiteArgumentParser()
    args = parser.parse_args()

    # Get the dictionary to get None instead of a crash on missing keys
    args = args.__dict__

    try:
        return run(
            provider=args.get('provider'),
            # AWS
            profile=args.get('profile'),
            # Azure
            user_account=args.get('user_account'),
            service_account=args.get('service_account'),
            cli=args.get('cli'),
            msi=args.get('msi'),
            service_principal=args.get('service_principal'),
            file_auth=args.get('file_auth'),
            tenant_id=args.get('tenant_id'),
            subscription_id=args.get('subscription_id'),
            client_id=args.get('client_id'),
            client_secret=args.get('client_secret'),
            username=args.get('username'),
            password=args.get('password'),
            # GCP
            project_id=args.get('project_id'),
            folder_id=args.get('folder_id'),
            organization_id=args.get('organization_id'),
            all_projects=args.get('all_projects'),
            # General
            report_name=args.get('report_name'),
            report_dir=args.get('report_dir'),
            timestamp=args.get('timestamp'),
            services=args.get('services'),
            skipped_services=args.get('skipped_services'),
            result_format=args.get('result_format'),
            database_name=args.get('database_name'),
            host_ip=args.get('host_ip'),
            host_port=args.get('host_port'),
            max_workers=args.get('max_workers'),
            regions=args.get('regions'),
            fetch_local=args.get('fetch_local'),
            update=args.get('update'),
            ip_ranges=args.get('ip_ranges'),
            ip_ranges_name_key=args.get('ip_ranges_name_key'),
            ruleset=args.get('ruleset'),
            exceptions=args.get('exceptions'),
            force_write=args.get('force_write'),
            debug=args.get('debug'),
            quiet=args.get('quiet'),
            log_file=args.get('log_file'),
            no_browser=args.get('no_browser'),
            programmatic_execution=False)
    except (KeyboardInterrupt, SystemExit):
        print_info('Exiting')
        # Bug fix: the handler previously fell through and returned None;
        # report the conventional SIGINT exit status instead.
        return 130
def main(args=None):
    """
    Main method that runs a scan.

    :param args: parsed argparse Namespace, or None to parse sys.argv
    """
    if not args:
        parser = ScoutSuiteArgumentParser()
        args = parser.parse_args()

    # Get the dictionary to get None instead of a crash on missing keys
    args = args.__dict__

    loop = asyncio.get_event_loop()
    loop.set_default_executor(
        ThreadPoolExecutor(max_workers=args.get('max_workers')))
    try:
        loop.run_until_complete(run_scan(args))
    finally:
        # Close the loop even when run_scan raises, so its resources
        # (executor threads, selectors) are always released.
        loop.close()
def main(args=None):
    """
    Main method that runs a scan.

    :param args: parsed argparse Namespace, or None to parse sys.argv
    :return:     0 on success, 401 on authentication failure,
                 130 when the fetch is cancelled by the user
    """
    if not args:
        parser = ScoutSuiteArgumentParser()
        args = parser.parse_args()

    # Get the dictionnary to get None instead of a crash
    args = args.__dict__

    # Configure the debug level
    config_debug_level(args.get('debug'))

    # Create a cloud provider object
    cloud_provider = get_provider(provider=args.get('provider'),
                                  profile=args.get('profile'),
                                  project_id=args.get('project_id'),
                                  folder_id=args.get('folder_id'),
                                  organization_id=args.get('organization_id'),
                                  all_projects=args.get('all_projects'),
                                  report_dir=args.get('report_dir'),
                                  timestamp=args.get('timestamp'),
                                  services=args.get('services'),
                                  skipped_services=args.get('skipped_services'),
                                  thread_config=args.get('thread_config'))

    report_file_name = generate_report_name(cloud_provider.provider_code, args)

    # TODO: move this to after authentication, so that the report can be more specific to what's being scanned.
    # For example if scanning with a GCP service account, the SA email can only be known after authenticating...
    # Create a new report
    report = Scout2Report(args.get('provider'), report_file_name,
                          args.get('report_dir'), args.get('timestamp'))

    # Complete run, including pulling data from provider
    if not args.get('fetch_local'):
        # Authenticate to the cloud provider; the credential parameters are a
        # superset across providers — each provider uses only those it needs
        authenticated = cloud_provider.authenticate(profile=args.get('profile'),
                                                    user_account=args.get('user_account'),
                                                    service_account=args.get('service_account'),
                                                    cli=args.get('cli'),
                                                    msi=args.get('msi'),
                                                    service_principal=args.get('service_principal'),
                                                    file_auth=args.get('file_auth'),
                                                    tenant_id=args.get('tenant_id'),
                                                    subscription_id=args.get('subscription_id'),
                                                    client_id=args.get('client_id'),
                                                    client_secret=args.get('client_secret'),
                                                    username=args.get('username'),
                                                    password=args.get('password'))
        if not authenticated:
            # HTTP-style status for "could not authenticate"
            return 401

        # Fetch data from provider APIs
        try:
            cloud_provider.fetch(regions=args.get('regions'))
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            # Conventional SIGINT exit status
            return 130

        # Update means we reload the whole config and overwrite part of it:
        # start from the previous run's data, then replace only the services
        # fetched during this run
        if args.get('update'):
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing (e.g. attach known IP ranges before rules run)
    cloud_provider.preprocessing(args.get('ip_ranges'),
                                 args.get('ip_ranges_name_key'))

    # Analyze config: load the finding ruleset and run it over the data
    finding_rules = Ruleset(environment_name=args.get('profile'),
                            cloud_provider=args.get('provider'),
                            filename=args.get('ruleset'),
                            ip_ranges=args.get('ip_ranges'),
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters (a second ruleset with rule_type='filters')
    filter_rules = Ruleset(cloud_provider=args.get('provider'),
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions: apply the user-supplied exception list, best-effort —
    # a missing or malformed file degrades to "no exceptions", not a crash
    try:
        exceptions = RuleExceptions(args.get('profile'),
                                    args.get('exceptions')[0])
        exceptions.process(cloud_provider)
        exceptions = exceptions.exceptions
    except Exception as e:
        print_debug(
            'Warning, failed to load exceptions. The file may not exist or may have an invalid format.'
        )
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # TODO: this is AWS-specific - move to postprocessing?
    # This is partially implemented
    # Get organization data if it exists (best-effort: any failure is ignored)
    try:
        profile = AWSProfiles.get(args.get('profile'))[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(
                os.path.expanduser('~/.aws/recipes/%s/organization.json'
                                   % profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    # Re-key the account list by account Id
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    setattr(cloud_provider, 'organization', org)
    except Exception as e:
        pass

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions,
                                   args.get('force_write'), args.get('debug'))

    # Open the report by default
    if not args.get('no_browser'):
        print_info('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
def run_from_cli():
    """Command-line entry point: parse the CLI arguments and launch a scan.

    :return: the exit status returned by ``run``, or 130 when interrupted
    """
    parser = ScoutSuiteArgumentParser()
    args = parser.parse_args()

    # Get the dictionary to get None instead of a crash
    args = args.__dict__

    # TODO provider-specific arguments should be prepended with the provider's code
    # (e.g. aws_profile, azure_user_account)

    # Every CLI option forwarded verbatim to run(), grouped by provider.
    forwarded_options = (
        'provider',
        # AWS
        'profile', 'aws_access_key_id', 'aws_secret_access_key',
        'aws_session_token',
        # Azure
        'cli', 'user_account', 'user_account_browser',
        'service_account', 'msi', 'service_principal', 'file_auth',
        'client_id', 'client_secret', 'username', 'password',
        'tenant_id', 'subscription_ids', 'all_subscriptions',
        # GCP
        'project_id', 'folder_id', 'organization_id', 'all_projects',
        # Aliyun
        'access_key_id', 'access_key_secret',
        # GitHub
        'repositories',
        # General
        'report_name', 'report_dir', 'timestamp',
        'services', 'skipped_services', 'list_services',
        'result_format', 'database_name', 'host_ip', 'host_port',
        'max_workers', 'regions', 'excluded_regions',
        'fetch_local', 'update', 'max_rate',
        'ip_ranges', 'ip_ranges_name_key', 'ruleset', 'exceptions',
        'force_write', 'debug', 'quiet', 'log_file', 'no_browser',
    )

    try:
        # Each option is looked up with dict.get so absent keys become None.
        return run(programmatic_execution=False,
                   **{option: args.get(option) for option in forwarded_options})
    except (KeyboardInterrupt, SystemExit):
        print_info('Exiting')
        return 130