def test_get_stackset_ready_accounts(self):
    """Only the current account should be reported as stack-set ready."""
    ready = [get_aws_account_id(self.creds)]
    # The account we run as must come back unchanged...
    assert get_stackset_ready_accounts(self.creds, ready) == ready
    # ...and an account we cannot assume the execution role in is filtered out.
    assert get_stackset_ready_accounts(self.creds, ready + ['123456789012']) == ready
def get_credentials(self):
    """Load AWS credentials for this provider and cache the account ID.

    Reads the credentials associated with ``self.name`` into
    ``self.credentials``. Resolving the account ID is best-effort: any
    failure (e.g. no network access, invalid keys) is ignored and
    ``self.account_id`` is simply left unset.

    :return: The credentials dictionary that was read.
    """
    # For now, use the existing code...
    self.credentials = read_creds(self.name)
    try:
        self.account_id = get_aws_account_id(self.credentials)
    except Exception:
        # Best-effort only. Fix: the original bare `except:` would also
        # swallow SystemExit/KeyboardInterrupt; narrow the catch instead.
        pass
    return self.credentials
def test_create_stack_instances(self):
    """Create a stack set, spin up one instance in us-east-1, and wait for it."""
    name = self.make_travisname('OpinelUnitTestStackSet002')
    create_stack_set(self.api_client, name, 'tests/data/cloudformation-004.json', wait_for_completion=True)
    account_ids = [get_aws_account_id(self.creds)]
    op_id = create_stack_instances(self.api_client, name, account_ids, ['us-east-1'])
    wait_for_operation(self.api_client, name, op_id)
    # Register the stack set for teardown.
    self.cleanup['stacksets'].append(name)
def authenticate(self, profile, csv_credentials, mfa_serial, mfa_code, **kwargs):
    """
    Implement authentication for the AWS provider.

    Reads credentials for the given profile, caches them and the resolved
    AWS account ID on the instance.

    :return: True when an access key ID was obtained, False otherwise
    """
    self.credentials = read_creds(profile, csv_credentials, mfa_serial, mfa_code)
    self.aws_account_id = get_aws_account_id(self.credentials)
    # Authentication succeeded iff an access key ID was resolved.
    return self.credentials['AccessKeyId'] is not None
def tweak_params(self, params, credentials):
    """Recursively substitute placeholders in a parameter structure.

    Walks dicts and lists in depth; any scalar equal to
    ``'_AWS_ACCOUNT_ID_'`` is replaced with the account ID derived from
    *credentials*. Dicts are updated in place; lists are rebuilt; all
    other scalars are returned unchanged.

    :param params:      Parameter structure (dict, list, or scalar)
    :param credentials: AWS credentials used to resolve the account ID
    :return:            The structure with placeholders substituted
    """
    # Fix: use isinstance() instead of `type(x) == dict/list` so that
    # subclasses (e.g. OrderedDict) are traversed as well.
    if isinstance(params, dict):
        for key in params:
            params[key] = self.tweak_params(params[key], credentials)
    elif isinstance(params, list):
        params = [self.tweak_params(value, credentials) for value in params]
    elif params == '_AWS_ACCOUNT_ID_':
        # Only resolve the account ID when the placeholder is present.
        params = get_aws_account_id(credentials)
    return params
def main():
    """Entry point of the Scout2 CLI (full variant with org data and JSON output).

    Parses arguments, fetches (or reloads from disk) the AWS configuration,
    runs the finding and filter rulesets, and writes the HTML report.

    :return: 42 on error, 130 when cancelled by the user, None on success.
             NOTE(review): the --json path terminates via sys.exit() instead
             of returning, skipping the HTML report on purpose.
    """
    # Parse arguments
    parser = Scout2ArgumentParser()
    args = parser.parse_args()
    # Configure the debug level
    configPrintException(args.debug)
    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42
    # Set the profile name
    profile_name = args.profile[0]
    # Search for AWS credentials
    # NOTE(review): `credentials` is only bound when not running a local
    # analysis; it is also only used inside `if not args.fetch_local` below.
    if not args.fetch_local:
        credentials = read_creds(args.profile[0], args.csv_credentials, args.mfa_serial, args.mfa_code)
        if credentials['AccessKeyId'] is None:
            return 42
    # Create a new Scout2 config
    report = Scout2Report(profile_name, args.report_dir, args.timestamp)
    aws_config = Scout2Config(profile_name, args.report_dir, args.timestamp, args.services, args.skipped_services, args.thread_config)
    if not args.fetch_local:
        # Fetch data from AWS APIs if not running a local analysis
        try:
            aws_config.fetch(credentials, regions=args.regions, partition_name=get_partition_name(credentials))
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130
        aws_config = report.jsrw.to_dict(aws_config)
        # Set the account ID
        aws_config['aws_account_id'] = get_aws_account_id(credentials)
        # Update means we reload the whole config and overwrite part of it
        if args.update == True:
            new_aws_config = copy.deepcopy(aws_config)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            for service in new_aws_config['service_list']:
                # Per service only for now, may add per region & per VPC later...
                aws_config['services'][service] = new_aws_config['services'][service]
            # Update the metadata too
            aws_config['metadata'] = Scout2Config('default', None, None, [], []).metadata
    else:
        # Reload to flatten everything into a python dictionary
        aws_config = report.jsrw.load_from_file(AWSCONFIG)
    # Pre processing
    preprocessing(aws_config, args.ip_ranges, args.ip_ranges_name_key)
    # Analyze config
    finding_rules = Ruleset(profile_name, filename=args.ruleset, ip_ranges=args.ip_ranges, aws_account_id=aws_config['aws_account_id'])
    pe = ProcessingEngine(finding_rules)
    pe.run(aws_config)
    # Create display filters
    filter_rules = Ruleset(filename='filters.json', rule_type='filters', aws_account_id=aws_config['aws_account_id'])
    pe = ProcessingEngine(filter_rules)
    pe.run(aws_config)
    # Handle exceptions
    try:
        exceptions = RuleExceptions(profile_name, args.exceptions[0])
        exceptions.process(aws_config)
        exceptions = exceptions.exceptions
    except Exception as e:
        printDebug('Warning, failed to load exceptions. The file may not exist or may have an invalid format.')
        exceptions = {}
    # Finalize
    postprocessing(aws_config, report.current_time, finding_rules)
    # Get organization data if it exists
    # NOTE(review): the bare `except: pass` below silently ignores any
    # failure (missing profile, malformed JSON); acceptable for optional
    # data but worth narrowing to Exception.
    try:
        profile = AWSProfiles.get(profile_name)[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(os.path.expanduser('~/.aws/recipes/%s/organization.json' % profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        # Key the org map by account ID; remaining fields stay as values.
                        account_id = account.pop('Id')
                        org[account_id] = account
                    aws_config['organization'] = org
    except:
        pass
    if args.json:
        printInfo('Writing to results.json')
        # NOTE(review): consider a `with` block here instead of manual close.
        fp = open('results.json', 'w')
        json.dump(aws_config, fp, default=json_helper)
        fp.close()
        sys.exit()
    # Save config and create HTML report
    html_report_path = report.save(aws_config, exceptions, args.force_write, args.debug)
    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)
def main():
    """Download CloudTrail log files from S3 for a date range, then gunzip them.

    For each region with a CloudTrail trail, locates the receiving S3 bucket,
    lists the log objects for every day in [--from, --to], downloads them with
    a thread pool, and finally decompresses everything under the download
    folder.

    :return: 42 on argument/credential errors, None on success.
    """
    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions')
    parser.add_argument('partition-name')
    parser.add_argument('bucket-name')
    parser.parser.add_argument('--aws-account-id', dest='aws_account_id', default=[None], nargs='+', help='Bleh.')
    parser.parser.add_argument('--from', dest='from_date', default=[None], nargs='+', help='Bleh.')
    parser.parser.add_argument('--to', dest='to_date', default=[None], nargs='+', help='Bleh.')
    args = parser.parse_args()
    # Configure the debug level
    configPrintException(args.debug)
    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42
    # Arguments
    profile_name = args.profile[0]
    try:
        # Dates are expected as YYYY/MM/DD; `delta` is the inclusive span length - 1.
        from_date = datetime.datetime.strptime(args.from_date[0], "%Y/%m/%d").date()
        to_date = datetime.datetime.strptime(args.to_date[0], "%Y/%m/%d").date()
        delta = to_date - from_date
    except Exception as e:
        printException(e)
        printError('Error: dates must be formatted of the following format YYYY/MM/DD')
        return 42
    if delta.days < 0:
        printError('Error: your \'to\' date is earlier than your \'from\' date')
        return 42
    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42
    # Fetch AWS account ID
    if not args.aws_account_id[0]:
        printInfo('Fetching the AWS account ID...')
        aws_account_id = get_aws_account_id(credentials)
    else:
        aws_account_id = args.aws_account_id[0]
    # NOTE(review): mutates the module-level path template in place; running
    # main() twice in one process would substitute into the already-substituted path.
    global cloudtrail_log_path
    cloudtrail_log_path = cloudtrail_log_path.replace('AWS_ACCOUNT_ID', aws_account_id)
    # Create download dir
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)
    # Iterate through regions
    s3_clients = {}
    for region in build_region_list('cloudtrail', args.regions, args.partition_name):
        # Connect to CloudTrail
        cloudtrail_client = connect_service('cloudtrail', credentials, region)
        if not cloudtrail_client:
            continue
        # Get information about the S3 bucket that receives CloudTrail logs
        # NOTE(review): if several trails exist, only the last trail's bucket
        # and prefix are kept — confirm this is the intended behavior.
        trails = cloudtrail_client.describe_trails()
        for trail in trails['trailList']:
            bucket_name = trail['S3BucketName']
            prefix = trail['S3KeyPrefix'] if 'S3KeyPrefix' in trail else ''
        # Connect to S3
        # S3 clients are cached per region; the bucket may live in a
        # different region than the trail.
        manage_dictionary(s3_clients, region, connect_service('s3', credentials, region))
        target_bucket_region = get_s3_bucket_location(s3_clients[region], bucket_name)
        manage_dictionary(s3_clients, target_bucket_region, connect_service('s3', credentials, target_bucket_region))
        s3_client = s3_clients[target_bucket_region]
        # Generate base path for files
        log_path = os.path.join(prefix, cloudtrail_log_path.replace('REGION', region))
        # Download files
        printInfo('Downloading log files in %s... ' % region, False)
        keys = []
        # One listing per day in the inclusive [from_date, to_date] range.
        for i in range(delta.days + 1):
            day = from_date + timedelta(days=i)
            folder_path = os.path.join(log_path, day.strftime("%Y/%m/%d"))
            try:
                objects = handle_truncated_response(s3_client.list_objects, {'Bucket': bucket_name, 'Prefix': folder_path}, ['Contents'])
                for o in objects['Contents']:
                    keys.append([o['Key'], 0])
            except Exception as e:
                # Missing days (no 'Contents') are expected; keep going.
                printException(e)
                pass
        thread_work(keys, download_object, params={'Bucket': bucket_name, 'S3Client': s3_client}, num_threads=100)
        printInfo('Done')
    # Iterate through files and gunzip 'em
    printInfo('Decompressing files...')
    gzlogs = []
    for root, dirnames, filenames in os.walk(download_folder):
        for filename in filenames:
            gzlogs.append(filename)
    thread_work(gzlogs, gunzip_file, num_threads=30)
def main():
    """Create IAM policies (inline or managed) from template files.

    Each template has the account-ID placeholder substituted, then is either
    put as an inline policy on the given IAM targets, or created as a managed
    policy (optionally with a description) and attached to the targets.

    :return: 42 on argument/credential/connection errors, None otherwise.
    """
    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('managed', dest='is_managed', default=False, action='store_true', help='Create a managed policy.')
    parser.add_argument('type', default=[None], nargs='+', choices=['group', 'managed', 'role', 'user'], help='Type of target that the policy will apply or be attached to.')
    parser.add_argument('targets', default=[], nargs='+', help='Name of the IAM entity the policy will be added to (required for inline policies).')
    parser.add_argument('templates', default=[], nargs='+', help='Path to the template IAM policies that will be created.')
    parser.add_argument('save', dest='save_locally', default=False, action='store_true', help='Generates the policies and store them locally.')
    args = parser.parse_args()
    # Configure the debug level
    configPrintException(args.debug)
    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42
    # Arguments
    profile_name = args.profile[0]
    target_type = args.type[0]
    if len(args.templates) == 0:
        printError('Error: you must specify the path the template IAM policies.')
        return 42
    if not args.is_managed and target_type == None:
        printError('Error: you must either create a managed policy or specify the type of IAM entity the policy will be attached to.')
        return 42
    # NOTE(review): this third check is unreachable — it repeats the
    # condition of the previous check plus an extra clause, so the previous
    # `return 42` always fires first. Likely meant to check len(args.targets)
    # independently.
    if not args.is_managed and target_type == None and len(args.targets) < 1:
        printError('Error: you must provide the name of at least one IAM %s you will attach this inline policy to.' % target_type)
        return 42
    # Read creds
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42
    # Connect to IAM APIs
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42
    # Get AWS account ID
    aws_account_id = get_aws_account_id(credentials)
    # Create the policies
    for template in args.templates:
        if not os.path.isfile(template):
            printError('Error: file \'%s\' does not exist.' % template)
            continue
        with open(template, 'rt') as f:
            policy = f.read()
        # Substitute the account ID placeholder in the policy document.
        policy = re_aws_account_id.sub(aws_account_id, policy)
        policy_name = os.path.basename(template).split('.')[0]
        if not args.is_managed:
            # Inline policy: put it on each named target via put_<type>_policy.
            callback = getattr(iam_client, 'put_' + target_type + '_policy')
            params = {}
            params['PolicyName'] = policy_name
            params['PolicyDocument'] = policy
            for target in args.targets:
                params[target_type.title() + 'Name'] = target
                try:
                    printInfo('Creating policy \'%s\' for the \'%s\' IAM %s...' % (policy_name, target, target_type))
                    callback(**params)
                except Exception as e:
                    printException(e)
                    pass
        else:
            # Managed policy: create it once, then optionally attach to targets.
            params = {}
            params['PolicyDocument'] = policy
            params['PolicyName'] = policy_name
            description = ''  # NOTE(review): unused local
            # Search for a description file
            descriptions_dir = os.path.join(os.path.dirname(template), 'descriptions')
            if os.path.exists(descriptions_dir):
                description_file = os.path.join(descriptions_dir, os.path.basename(template).replace('.json', '.txt'))
                if os.path.isfile(description_file):
                    with open(description_file, 'rt') as f:
                        params['Description'] = f.read()
            elif prompt_4_yes_no('Do you want to add a description to the \'%s\' policy' % policy_name):
                params['Description'] = prompt_4_value('Enter the policy description:')
            # NOTE(review): KeyError if the descriptions dir exists but no
            # matching file was found (Description never set) — confirm.
            params['Description'] = params['Description'].strip()
            printInfo('Creating policy \'%s\'...' % (policy_name))
            new_policy = iam_client.create_policy(**params)
            if len(args.targets):
                callback = getattr(iam_client, 'attach_' + target_type + '_policy')
                for target in args.targets:
                    printInfo('Attaching policy to the \'%s\' IAM %s...' % (target, target_type))
                    params = {}
                    params['PolicyArn'] = new_policy['Policy']['Arn']
                    params[target_type.title() + 'Name'] = target
                    callback(**params)
        if args.save_locally:
            with open('%s-%s.json' % (policy_name, profile_name), 'wt') as f:
                f.write(policy)
                f.close()  # NOTE(review): redundant inside `with`
def main():
    """Configure cross-account CloudWatch Events forwarding via a stack set.

    Discovers active organization accounts (excluding the monitoring/master
    account), keeps those whose stack set execution role can be assumed,
    grants them PutEvents permission on each region's default event bus, and
    deploys the CloudwatchEventsForwarding stack set plus one stack instance
    per account/region.

    :return: 42 on error, None on success.
    """
    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions', help='Regions where stack instances will be created.')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--stack-set-region', dest='stack_set_region', default=None, required=True, help='Region where the stack set will be created.')
    args = parser.parse_args()
    # Configure the debug level
    configPrintException(args.debug)
    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42
    # Get profile name
    profile_name = args.profile[0]
    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42
    # Validate the stack set region
    regions = build_region_list('events', args.regions, args.partition_name)
    if args.stack_set_region not in regions:
        printError('Error, the stack set region \'%s\' is not valid. Acceptable values are:' % args.stack_set_region)
        printError(', '.join(regions))
        return 42
    # Determine the master account id to exclude it from the list of accounts to be configured for event forwarding
    monitoring_account_id = get_aws_account_id(credentials)
    # Connect to the AWS Organizations API
    api_client = connect_service('organizations', credentials)
    # List all accounts in the organization
    org_account_ids = []
    org_accounts = handle_truncated_response(api_client.list_accounts, {}, ['Accounts'])['Accounts']
    org_account_ids = [account['Id'] for account in org_accounts if account['Status'] == 'ACTIVE' and account['Id'] != monitoring_account_id]
    printInfo('Found %d accounts in the organization.' % len(org_account_ids))
    printDebug(str(org_account_ids))
    # Verify that the account has been configured for stack sets by attempting to assume the stack set execution role
    api_client = connect_service('sts', credentials, silent=True)
    configured_org_account_ids = []
    for account_id in org_account_ids:
        try:
            role_arn = 'arn:aws:iam::%s:role/AWSCloudFormationStackSetExecutionRole' % account_id
            api_client.assume_role(RoleArn=role_arn, RoleSessionName='foobar')
            configured_org_account_ids.append(account_id)
        except Exception as e:
            # AssumeRole failure means the account is not configured; skip it.
            pass
    if len(configured_org_account_ids) != len(org_account_ids):
        printInfo('Only %d of these accounts have the necessary stack set execution role:' % len(configured_org_account_ids))
        printInfo(str(configured_org_account_ids))
    # For each region with cloudwatch events, put a permission for each account
    printInfo('Adding permissions on the default event buses...')
    for region in regions:
        api_client = connect_service('events', credentials, region)
        for account in org_accounts:
            account_id = account['Id']
            if account_id not in configured_org_account_ids:
                continue
            account_name = account['Name']  # NOTE(review): unused local
            api_client.put_permission(Action='events:PutEvents', Principal=account_id, StatementId='AWSRecipesAllow%s' % account_id)
    # Create the stack set
    try:
        stack_set_name = 'CloudwatchEventsForwarding'
        api_client = connect_service('cloudformation', credentials, args.stack_set_region)
        # TBD: need for the region where the stack set is created and the regions where the stack instances are created...
        template_path = os.path.join((os.path.dirname(os.path.realpath(__file__))), '../CloudFormationTemplates/CloudwatchEventsForwarding.region.yml')
        with open(template_path, 'rt') as f:
            template_body = f.read()
        template_parameters = [{'ParameterKey': 'EventsMonitoringAccountID', 'ParameterValue': get_aws_account_id(credentials)}]
        printInfo('Creating the stack set...')
        response = api_client.create_stack_set(StackSetName=stack_set_name, TemplateBody=template_body, Parameters=template_parameters)
    except Exception as e:
        # NOTE(review): `e.response` assumes a botocore ClientError; a
        # non-client exception (e.g. IOError on the template) would raise
        # AttributeError here — confirm.
        if e.response['Error']['Code'] != 'NameAlreadyExistsException':
            printException(e)
            printError('Failed to create the stack set.')
            return 42
    # Create the stack instances: one per region in every account
    operation_preferences = {'FailureTolerancePercentage': 100, 'MaxConcurrentPercentage': 100}
    response = api_client.create_stack_instances(StackSetName=stack_set_name, Accounts=configured_org_account_ids, Regions=regions, OperationPreferences=operation_preferences)
    printInfo('Successfully started operation Id %s' % response['OperationId'])
def main():
    """Entry point of the Scout2 CLI (Ruleset.analyze variant).

    Parses arguments, fetches (or reloads from disk) the AWS configuration,
    analyzes it with the finding and filter rulesets, and writes the HTML
    report.

    :return: 42 on error, 130 when cancelled by the user, None on success.
    """
    # Parse arguments
    parser = Scout2ArgumentParser()
    args = parser.parse_args()
    # Configure the debug level
    configPrintException(args.debug)
    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42
    # Set the profile name
    profile_name = args.profile[0]
    # Search for AWS credentials
    # NOTE(review): `credentials` is only bound (and only used) when not
    # running a local analysis.
    if not args.fetch_local:
        credentials = read_creds(args.profile[0], args.csv_credentials, args.mfa_serial, args.mfa_code)
        if credentials['AccessKeyId'] is None:
            return 42
    # Create a new Scout2 config
    report = Scout2Report(profile_name, args.report_dir, args.timestamp)
    aws_config = Scout2Config(profile_name, args.report_dir, args.timestamp, args.services, args.skipped_services)
    if not args.fetch_local:
        # Fetch data from AWS APIs if not running a local analysis
        try:
            aws_config.fetch(credentials, regions=args.regions, partition_name=args.partition_name)
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130
        aws_config = report.jsrw.to_dict(aws_config)
        # Set the account ID
        aws_config['aws_account_id'] = get_aws_account_id(credentials)
        # Update means we reload the whole config and overwrite part of it
        if args.update == True:
            new_aws_config = copy.deepcopy(aws_config)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            for service in new_aws_config['service_list']:
                # Per service only for now, may add per region & per VPC later...
                aws_config['services'][service] = new_aws_config['services'][service]
            # Update the metadata too
            aws_config['metadata'] = Scout2Config('default', None, None, [], []).metadata
    else:
        # Reload to flatten everything into a python dictionary
        aws_config = report.jsrw.load_from_file(AWSCONFIG)
    # Pre processing
    preprocessing(aws_config, args.ip_ranges, args.ip_ranges_name_key)
    # Analyze config
    ruleset = Ruleset(profile_name, filename=args.ruleset, ip_ranges=args.ip_ranges)
    ruleset.analyze(aws_config)
    # Create display filters
    filters = Ruleset(filename='filters.json', rule_type='filters')
    filters.analyze(aws_config)
    # Handle exceptions
    process_exceptions(aws_config, args.exceptions[0])
    # Finalize
    postprocessing(aws_config, report.current_time, ruleset)
    # Save config and create HTML report
    html_report_path = report.save(aws_config, {}, args.force_write, args.debug)
    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)
def main():
    """Deploy/refresh the master-account CloudFormation stacks and stack sets.

    Lists existing stacks and stack sets in the master region, then for each
    template matching --stack-prefix creates the stack if absent or updates it
    if the deployed resource is older than the template file.

    :return: 42 on error; otherwise returns after the master-stack pass (the
             stack set pass below is currently unreachable — see NOTE).
    """
    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions', help='Regions where stack instances will be created.')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--master-region', dest='master_region', default=None, required=True, help='Region where the global stacks and stack sets will be created.')
    parser.parser.add_argument('--stack-prefix', dest='stack_prefix', default=None, required=True, help='Prefix of the CF Templates to be used when creating/updating stacks.')
    args = parser.parse_args()
    # Configure the debug level
    configPrintException(args.debug)
    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42
    # Get profile name
    profile_name = args.profile[0]
    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42
    # Get the master AWS account ID
    master_account_id = get_aws_account_id(credentials)
    # Get list of accounts ready for Stack sets
    api_client = connect_service('organizations', credentials, silent=True)
    try:
        org_account_ids = get_organization_account_ids(api_client, quiet=False)
    except:
        # Not an Organizations master account: fall back to just this account.
        org_account_ids = [master_account_id]
    configured_org_account_ids = get_stackset_ready_accounts(credentials, org_account_ids, quiet=False)
    # Validate the stack set region
    regions = build_region_list('cloudformation', args.regions, args.partition_name)
    if args.master_region not in regions:
        printError('Error, the stack set region \'%s\' is not valid. Acceptable values are:' % args.master_region)
        printError(', '.join(regions))
        return 42
    # Connect
    printInfo('')
    api_client = connect_service('cloudformation', credentials, args.master_region, silent=True)
    # Establish the list of existing stacks and stack sets
    deployed_resources = {'stacks': {}, 'stack_sets': {}}
    printInfo('Fetching existing stacks and stack sets in %s in %s...' % (args.master_region, master_account_id))
    for stack in handle_truncated_response(api_client.list_stacks, {}, ['StackSummaries'])['StackSummaries']:
        # Ignore stacks that failed to create or were deleted.
        if stack['StackStatus'] not in ['CREATE_FAILED', 'DELETE_COMPLETE']:
            deployed_resources['stacks'][stack['StackName']] = stack
    for stack_set in handle_truncated_response(api_client.list_stack_sets, {'Status': 'ACTIVE'}, ['Summaries'])['Summaries']:
        # Fetch the full description, which includes creation metadata.
        stack_set = api_client.describe_stack_set(StackSetName=stack_set['StackSetName'])['StackSet']
        deployed_resources['stack_sets'][stack_set['StackSetName']] = stack_set
    printInfo(' - Found %d stacks.' % len(deployed_resources['stacks']))
    for stack_name in deployed_resources['stacks']:
        printInfo(' - %s' % stack_name)
    printInfo(' - Found %d stacks sets.' % len(deployed_resources['stack_sets']))
    for stack_set_name in deployed_resources['stack_sets']:
        printInfo(' - %s' % stack_set_name)
    # Create the list of stacks to deploy
    templates = get_cloudformation_templates(args.stack_prefix)
    # Master stacks
    for stack_name in sorted(templates['master_stacks'].keys()):
        if stack_name not in deployed_resources['stacks']:
            create_stack(api_client, stack_name, templates['master_stacks'][stack_name]['file_path'], wait_for_completion=templates['master_stacks'][stack_name]['wait_for_completion'])
        elif resource_older_than_template('stack', deployed_resources['stacks'][stack_name], templates['master_stacks'][stack_name]['file_path']):
            update_stack(api_client, stack_name, templates['master_stacks'][stack_name]['file_path'], wait_for_completion=templates['master_stacks'][stack_name]['wait_for_completion'])
    if len(configured_org_account_ids) == 0:
        printInfo('\nNo account IDs that support stack sets were found, skipping stack set configuration.')
        return
    # NOTE(review): this unconditional return makes the stack set
    # creation/update loop below dead code. It may be a deliberate temporary
    # disable — confirm before removing it.
    return
    # Stack sets
    for stack_set_name in sorted(templates['master_stack_sets'].keys()):
        if stack_set_name not in deployed_resources['stack_sets']:
            create_stack_set(api_client, stack_set_name, templates['master_stack_sets'][stack_set_name]['file_path'], wait_for_completion=True)
        elif resource_older_than_template('stack_set', deployed_resources['stack_sets'][stack_set_name], templates['master_stack_sets'][stack_set_name]['file_path']):
            update_stack_set(api_client, stack_set_name, templates['master_stack_sets'][stack_set_name]['file_path'], wait_for_completion=True)
def main():
    """Entry point of the Scout2 CLI (ProcessingEngine variant, returns 0).

    Parses arguments, fetches (or reloads from disk) the AWS configuration,
    runs the finding and filter rulesets, loads optional organization data,
    and writes the HTML report.

    :return: 0 on success, 42 on error, 130 when cancelled by the user.
    """
    # Parse arguments
    parser = Scout2ArgumentParser()
    args = parser.parse_args()
    # Configure the debug level
    configPrintException(args.debug)
    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42
    # Set the profile name
    profile_name = args.profile[0]
    # Search for AWS credentials
    # NOTE(review): `credentials` is only bound (and only used) when not
    # running a local analysis.
    if not args.fetch_local:
        credentials = read_creds(args.profile[0], args.csv_credentials, args.mfa_serial, args.mfa_code)
        if credentials['AccessKeyId'] is None:
            return 42
    # Create a new Scout2 config
    report = Scout2Report(profile_name, args.report_dir, args.timestamp)
    aws_config = Scout2Config(profile_name, args.report_dir, args.timestamp, args.services, args.skipped_services, args.thread_config)
    if not args.fetch_local:
        # Fetch data from AWS APIs if not running a local analysis
        try:
            aws_config.fetch(credentials, regions=args.regions, partition_name=get_partition_name(credentials))
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130
        aws_config = report.jsrw.to_dict(aws_config)
        # Set the account ID
        aws_config['aws_account_id'] = get_aws_account_id(credentials)
        # Update means we reload the whole config and overwrite part of it
        if args.update == True:
            new_aws_config = copy.deepcopy(aws_config)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            for service in new_aws_config['service_list']:
                # Per service only for now, may add per region & per VPC later...
                aws_config['services'][service] = new_aws_config['services'][service]
            # Update the metadata too
            aws_config['metadata'] = Scout2Config('default', None, None, [], []).metadata
    else:
        # Reload to flatten everything into a python dictionary
        aws_config = report.jsrw.load_from_file(AWSCONFIG)
    # Pre processing
    preprocessing(aws_config, args.ip_ranges, args.ip_ranges_name_key)
    # Analyze config
    finding_rules = Ruleset(profile_name, filename=args.ruleset, ip_ranges=args.ip_ranges, aws_account_id=aws_config['aws_account_id'])
    pe = ProcessingEngine(finding_rules)
    pe.run(aws_config)
    # Create display filters
    filter_rules = Ruleset(filename='filters.json', rule_type='filters', aws_account_id=aws_config['aws_account_id'])
    pe = ProcessingEngine(filter_rules)
    pe.run(aws_config)
    # Handle exceptions
    try:
        exceptions = RuleExceptions(profile_name, args.exceptions[0])
        exceptions.process(aws_config)
        exceptions = exceptions.exceptions
    except Exception as e:
        printDebug('Warning, failed to load exceptions. The file may not exist or may have an invalid format.')
        exceptions = {}
    # Finalize
    postprocessing(aws_config, report.current_time, finding_rules)
    # Get organization data if it exists
    # NOTE(review): bare except silently ignores any failure here; this is
    # optional enrichment data, but narrowing to Exception would be safer.
    try:
        profile = AWSProfiles.get(profile_name)[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(os.path.expanduser('~/.aws/recipes/%s/organization.json' % profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        # Key the org map by account ID; remaining fields stay as values.
                        account_id = account.pop('Id')
                        org[account_id] = account
                    aws_config['organization'] = org
    except:
        pass
    # Save config and create HTML report
    html_report_path = report.save(aws_config, exceptions, args.force_write, args.debug)
    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)
    return 0