async def _get_and_set_inline_policies(self, resource, iam_resource_type):
    """Fetch an IAM resource's inline policies and store them on the resource dict.

    :param resource: The resource dict (e.g. a user/group/role), keyed with
                     '<Type>Name'; gains 'inline_policies' and 'inline_policies_count'.
    :param iam_resource_type: Lowercase IAM type, e.g. 'user', 'group', 'role'.
    """
    client = AWSFacadeUtils.get_client('iam', self.session)
    list_policy_method = getattr(client, 'list_' + iam_resource_type + '_policies')
    resource_name = resource[iam_resource_type.title() + 'Name']
    args = {iam_resource_type.title() + 'Name': resource_name}
    resource['inline_policies'] = {}
    try:
        policy_names = await run_concurrently(
            lambda: list_policy_method(**args)['PolicyNames'])
        if len(policy_names) == 0:
            resource['inline_policies_count'] = 0
    except Exception as e:
        print_exception('Failed to list IAM policy: {}'.format(e))
    else:
        get_policy_method = getattr(client, 'get_' + iam_resource_type + '_policy')
        try:
            # Bind policy_name as a default argument: a plain closure is
            # late-bound, so every concurrently-scheduled task would end up
            # fetching the same (last) policy name.
            tasks = {
                asyncio.ensure_future(
                    run_concurrently(
                        lambda policy_name=policy_name: get_policy_method(
                            **dict(args, PolicyName=policy_name))))
                for policy_name in policy_names
            }
        except Exception as e:
            print_exception('Failed to get policy methods: {}'.format(e))
        else:
            for task in asyncio.as_completed(tasks):
                policy = await task
                policy_name = policy['PolicyName']
                policy_id = get_non_provider_id(policy_name)
                policy_document = policy['PolicyDocument']
                resource['inline_policies'][policy_id] = {}
                resource['inline_policies'][policy_id][
                    'PolicyDocument'] = self._normalize_statements(
                        policy_document)
                resource['inline_policies'][policy_id]['name'] = policy_name
            resource['inline_policies_count'] = len(resource['inline_policies'])
def _match_networks_and_firewalls(self):
    """
    For each network, match firewall rules in that network
    :return:
    """
    try:
        if 'computeengine' in self.service_list:
            for project in self.services['computeengine']['projects'].values():
                # Initialize network_id once per firewall. Doing this inside
                # the network loop (as before) wiped the id assigned while
                # matching an earlier network whenever a later network did
                # not match.
                for firewall in project['firewalls'].values():
                    firewall['network_id'] = None
                for network in project['networks'].values():
                    network['firewalls'] = []
                    for firewall in project['firewalls'].values():
                        if firewall['network_url'] == network['network_url']:
                            network['firewalls'].append(firewall['id'])
                            firewall['network_id'] = network['id']
    except Exception as e:
        print_exception('Unable to match firewalls and networks: {}'.format(e))
def run(self, cloud_provider, skip_dashboard=False):
    """Evaluate every enabled rule of the ruleset against the provider's data.

    :param cloud_provider: Provider object whose `services` dict is annotated in place.
    :param skip_dashboard: When True, skip populating dashboard metadata.
    """
    # Clean up existing findings
    for service in cloud_provider.services:
        cloud_provider.services[service][self.ruleset.rule_type] = {}

    # Process each rule
    for finding_path in self._filter_rules(self.rules, cloud_provider.service_list):
        for rule in self.rules[finding_path]:
            if not rule.enabled:  # or rule.service not in []: # TODO: handle this...
                continue
            print_debug('Processing %s rule[%s]: "%s"' % (rule.service, rule.filename, rule.description))
            finding_path = rule.path
            path = finding_path.split('.')
            service = path[0]
            manage_dictionary(cloud_provider.services[service], self.ruleset.rule_type, {})
            # Alias the deeply nested result dict to keep the rest readable
            findings = cloud_provider.services[service][self.ruleset.rule_type]
            findings[rule.key] = {}
            result = findings[rule.key]
            result['description'] = rule.description
            result['path'] = rule.path
            for attr in ['level', 'id_suffix', 'display_path']:
                if hasattr(rule, attr):
                    result[attr] = getattr(rule, attr)
            try:
                setattr(rule, 'checked_items', 0)
                result['items'] = recurse(
                    cloud_provider.services, cloud_provider.services, path, [], rule, True)
                if skip_dashboard:
                    continue
                result['dashboard_name'] = rule.dashboard_name
                result['checked_items'] = rule.checked_items
                result['flagged_items'] = len(result['items'])
                result['service'] = rule.service
                result['rationale'] = \
                    rule.rationale if hasattr(rule, 'rationale') else 'No description available.'
            except Exception as e:
                print_exception('Failed to process rule defined in %s: %s' % (rule.filename, e))
                # Fallback if process rule failed to ensure report creation and data dump still happen
                result['checked_items'] = 0
                result['flagged_items'] = 0
def save_to_file(self, content, file_type, force_write, debug):
    """Serialize *content* as JSON and write it to the report file for *file_type*.

    :param content: Object to serialize (via ScoutJsonEncoder).
    :param file_type: Report file type used to compute the destination path.
    :param force_write: Overwrite an existing file without prompting.
    :param debug: When True, pretty-print with an indent of 4.
    """
    config_path, first_line = get_filename(file_type, self.report_name, self.report_dir)
    print_info('Saving data to %s' % config_path)
    try:
        with self.__open_file(config_path, force_write) as f:
            if first_line:
                print('%s' % first_line, file=f)
            serialized = json.dumps(content,
                                    indent=4 if debug else None,
                                    separators=(',', ': '),
                                    sort_keys=True,
                                    cls=ScoutJsonEncoder)
            print('%s' % serialized, file=f)
    except AttributeError as e:
        # __open_file returned None (user declined to overwrite)
        pass
    except Exception as e:
        print_exception(e)
async def _get_and_set_s3_bucket_logging(self, bucket):
    """Fetch a bucket's access-logging configuration and record it on the bucket dict.

    Sets bucket['logging'] to '<target bucket>/<prefix>', 'Disabled',
    or 'Unknown' when the API call fails.
    """
    client = AWSFacadeUtils.get_client(
        's3',
        self.session,
        bucket['region'],
    )
    try:
        logging = await run_concurrently(
            lambda: client.get_bucket_logging(Bucket=bucket['Name']))
    except Exception as e:
        print_exception('Failed to get logging configuration for %s: %s' % (bucket['Name'], e))
        bucket['logging'] = 'Unknown'
        return
    if 'LoggingEnabled' not in logging:
        bucket['logging'] = 'Disabled'
        return
    enabled = logging['LoggingEnabled']
    bucket['logging'] = enabled['TargetBucket'] + '/' + enabled['TargetPrefix']
async def _get_and_set_mount_targets(self, file_system: {}, region: str):
    """Fetch the mount targets of an EFS file system and attach them to it.

    Populates file_system['MountTargets'] keyed by MountTargetId, then
    concurrently resolves each target's security groups.
    """
    file_system['MountTargets'] = {}
    try:
        mount_targets = await AWSFacadeUtils.get_all_pages(
            'efs', region, self.session, 'describe_mount_targets',
            'MountTargets', FileSystemId=file_system['FileSystemId'])
    except Exception as e:
        print_exception('Failed to get and set EFS mount targets: {}'.format(e))
        return
    if not mount_targets:
        return
    file_system['MountTargets'].update(
        {target['MountTargetId']: target for target in mount_targets})
    await get_and_set_concurrently(
        [self._get_and_set_mount_target_security_groups],
        mount_targets,
        region=region)
async def get_instance_user_data(self, region: str, instance_id: str):
    """Return the decoded user data of an EC2 instance, or None.

    None is returned when the attribute cannot be described, when the
    instance has no user data, or when decoding fails.
    """
    ec2_client = AWSFacadeUtils.get_client('ec2', self.session, region)
    try:
        user_data_response = await run_concurrently(
            lambda: ec2_client.describe_instance_attribute(
                Attribute='userData', InstanceId=instance_id))
    except Exception as e:
        print_exception(f'Failed to describe EC2 instance attributes: {e}')
        return None
    if 'Value' not in user_data_response['UserData'].keys():
        return None
    try:
        return await self._decode_user_data(
            user_data_response['UserData']['Value'])
    except Exception as e:
        print_exception(f'Unable to decode EC2 instance user data: {e}')
def connect_service(service, credentials, region_name=None, config=None, silent=False):
    """
    Instantiates an AWS API client
    :param service: Service targeted, e.g. ec2
    :param credentials: Id, secret, token
    :param region_name: Region desired, e.g. us-east-2
    :param config: Configuration (optional)
    :param silent: Whether or not to print messages
    :return:
    """
    api_client = None
    try:
        # The session is handled by the AWSAuthentication strategy, so no
        # session parameters need to be built here.
        client_params = {'service_name': service.lower()}
        if region_name:
            client_params['region_name'] = region_name
        if config:
            client_params['config'] = config
        aws_session = credentials
        if not silent:
            info_message = 'Connecting to AWS %s' % service
            if region_name:
                info_message = info_message + ' in %s' % region_name
            print_info('%s...' % info_message)
        api_client = aws_session.client(**client_params)
    except Exception as e:
        print_exception(e)
    return api_client
def pass_conditions(all_info, current_path, conditions, unknown_as_pass_condition=False):
    """
    Check that all conditions are passed for the current path.
    :param all_info: All of the services' data
    :param current_path: The value of the `path` variable defined in the finding file
    :param conditions: The conditions to check as defined in the finding file
    :param unknown_as_pass_condition: Consider an undetermined condition as passed
    :return:
    """
    # Fixes circular dependency
    from ScoutSuite.providers.base.configs.browser import get_value_at
    if len(conditions) == 0:
        return True
    # Read the operator without mutating the caller's list: the previous
    # conditions.pop(0) destroyed the rule definition, so a rule evaluated
    # more than once saw a corrupted condition structure.
    condition_operator = conditions[0]
    for condition in conditions[1:]:
        if condition[0] in ['and', 'or']:
            # Nested boolean expression: recurse
            res = pass_conditions(all_info, current_path, condition, unknown_as_pass_condition)
        else:
            # Conditions are formed as "path to value", "type of test", "value(s) for test"
            path_to_value, test_name, test_values = condition
            path_to_value = fix_path_string(all_info, current_path, path_to_value)
            target_obj = get_value_at(all_info, current_path, path_to_value)
            if not isinstance(test_values, (list, dict)):
                # Scalar test values may reference another value dynamically
                dynamic_value = re_get_value_at.match(test_values)
                if dynamic_value:
                    test_values = get_value_at(all_info, current_path, dynamic_value.groups()[0], True)
            try:
                res = pass_condition(target_obj, test_name, test_values)
            except Exception as e:
                res = True if unknown_as_pass_condition else False
                print_exception('Unable to process testcase \'%s\' on value \'%s\', interpreted as %s: %s' %
                                (test_name, str(target_obj), res, e))
        # Quick exit and + false
        if condition_operator == 'and' and not res:
            return False
        # Quick exit or + true
        if condition_operator == 'or' and res:
            return True
    # 'and' (no early False) passes; 'or' (no early True) fails
    return not condition_operator == 'or'
def _parse_bucket(self, raw_bucket):
    """Convert a raw GCS bucket object into Scout's bucket dict.

    :param raw_bucket: google-cloud-storage Bucket object
    :return: (bucket id, bucket dict) tuple
    """
    bucket_dict = {
        'id': get_non_provider_id(raw_bucket.id),
        'name': raw_bucket.name,
        'project_id': self.project_id,
        'project_number': raw_bucket.project_number,
        'creation_date': raw_bucket.time_created,
        'location': raw_bucket.location,
        'storage_class': raw_bucket.storage_class.lower(),
        'versioning_enabled': raw_bucket.versioning_enabled,
        'logging_enabled': raw_bucket.logging is not None,
    }

    # 'bucketPolicyOnly' is the legacy name of 'uniformBucketLevelAccess'
    iam_configuration = raw_bucket.iam_configuration.get('uniformBucketLevelAccess') or \
        raw_bucket.iam_configuration.get('bucketPolicyOnly')
    if iam_configuration:
        bucket_dict['uniform_bucket_level_access'] = iam_configuration.get("enabled", False)
    else:
        bucket_dict['uniform_bucket_level_access'] = None

    if bucket_dict['uniform_bucket_level_access']:
        # ACLs are irrelevant under uniform bucket-level access
        bucket_dict['acls'] = []
        bucket_dict['default_object_acl'] = []
    else:
        try:
            bucket_dict['acls'] = list(raw_bucket.acl)
        except Exception as e:
            print_exception('Failed to retrieve storage bucket ACLs: {}'.format(e))
            bucket_dict['acls'] = []
        try:
            bucket_dict['default_object_acl'] = list(raw_bucket.default_object_acl)
        except Exception as e:
            print_exception('Failed to retrieve storage bucket object ACLs: {}'.format(e))
            bucket_dict['default_object_acl'] = []

    bucket_dict['acl_configuration'] = self._get_cloudstorage_bucket_acl(
        raw_bucket)  # FIXME this should be "IAM"
    return bucket_dict['id'], bucket_dict
async def _get_and_set_instance_tags(self, instance: {}, region: str):
    """Fetch an RDS instance's tags and store them as instance['Tags'].

    instance['Tags'] is always left defined ({} on failure) so later code
    can index it without a KeyError — the previous version left it unset
    on the ClientError path.
    """
    client = AWSFacadeUtils.get_client('rds', self.session, region)
    account_id = get_aws_account_id(self.session)
    instance['Tags'] = {}
    try:
        instance_tagset = await run_concurrently(
            lambda: client.list_tags_for_resource(
                ResourceName="arn:aws:rds:" + region + ":" + account_id +
                ":db:" + instance['DBInstanceIdentifier']))
        instance['Tags'] = {
            x['Key']: x['Value']
            for x in instance_tagset['TagList']
        }
    except ClientError as e:
        # A missing tag set is expected; anything else is a real failure
        if e.response['Error']['Code'] != 'NoSuchTagSet':
            print_exception('Failed to get db instance tags for %s: %s' %
                            (instance['DBInstanceIdentifier'], e))
    except Exception as e:
        print_exception('Failed to get db instance tags for %s: %s' %
                        (instance['DBInstanceIdentifier'], e))
async def _get_and_set_db_parameters(self, parameter_group: {}, region: str):
    """Fetch the modifiable parameters of an RDS parameter group.

    Populates parameter_group['Parameters'] keyed by parameter name.
    """
    name = parameter_group['DBParameterGroupName']
    try:
        parameters = await AWSFacadeUtils.get_all_pages(
            'rds', region, self.session, 'describe_db_parameters',
            'Parameters', DBParameterGroupName=name)
        parameter_group['Parameters'] = {}
        for parameter in parameters:
            # Discard non-modifiable parameters
            if parameter['IsModifiable']:
                key = parameter.pop('ParameterName')
                parameter_group['Parameters'][key] = parameter
    except Exception as e:
        print_exception(f'Failed fetching DB parameters for {name}: {e}')
def _set_s3_bucket_secure_transport(self, bucket: {}):
    """Determine whether the bucket policy enforces HTTPS-only access.

    Sets bucket['secure_transport_enabled'] to True/False, or None when
    the policy could not be evaluated.
    """
    try:
        if 'policy' in bucket:
            bucket['secure_transport_enabled'] = False
            for statement in bucket['policy']['Statement']:
                # evaluate statement to see if it contains a condition disallowing HTTP transport
                # TODO this might not cover all cases
                if 'Condition' in statement and \
                        'Bool' in statement['Condition'] and \
                        'aws:SecureTransport' in statement['Condition']['Bool'] and \
                        ((statement['Condition']['Bool']['aws:SecureTransport'] == 'false' and
                          statement['Effect'] == 'Deny') or
                         (statement['Condition']['Bool']['aws:SecureTransport'] == 'true' and
                          statement['Effect'] == 'Allow')):
                    bucket['secure_transport_enabled'] = True
        else:
            bucket['secure_transport_enabled'] = False
    except Exception as e:
        print_exception('Failed to evaluate bucket policy for %s: %s' % (bucket['Name'], e))
        # Bug fix: this previously wrote the key 'secure_transport', leaving
        # 'secure_transport_enabled' (the key used everywhere else) unset.
        bucket['secure_transport_enabled'] = None
def save_blob_as_json(filename, blob, force_write):
    """
    Creates/Modifies file and saves python object as JSON
    :param filename: Destination path
    :param blob: Object to serialize (via CustomJSONEncoder)
    :param force_write: Overwrite an existing file without prompting
    :return:
    """
    try:
        if prompt_overwrite(filename, force_write):
            with open(filename, 'wt') as f:
                # Bug fix: the serialized JSON was previously passed to
                # print_info (console output) and never written to the
                # opened file, leaving it empty.
                print('%s' % json.dumps(blob,
                                        indent=4,
                                        separators=(',', ': '),
                                        sort_keys=True,
                                        cls=CustomJSONEncoder),
                      file=f)
    except Exception as e:
        print_exception(e)
async def get_instance_api_termination(self, region: str, instance_id: str):
    """Return whether API termination is disabled for an EC2 instance.

    :return: 'True'/'False' as a string, or None when the attribute
             cannot be described or has no value.
    """
    ec2_client = AWSFacadeUtils.get_client('ec2', self.session, region)
    try:
        # Removed leftover debug print statements that dumped the raw
        # response to stdout on every call.
        user_data_response = await run_concurrently(
            lambda: ec2_client.describe_instance_attribute(Attribute='disableApiTermination',
                                                           InstanceId=instance_id))
    except Exception as e:
        print_exception(
            'Failed to describe EC2 instance attributes: {}'.format(e))
        return None
    else:
        if 'Value' not in user_data_response['DisableApiTermination'].keys():
            return None
        return str(user_data_response['DisableApiTermination']['Value'])
async def get_identity_policies(self, region: str, identity_name: str):
    """Return the sending-authorization policies attached to an SES identity.

    :return: dict of policy name -> policy document; {} when there are no
             policies (or listing failed); None when fetching them failed.
    """
    ses_client = AWSFacadeUtils.get_client('ses', self.session, region)
    try:
        policy_names = await run_concurrently(
            lambda: ses_client.list_identity_policies(Identity=identity_name)['PolicyNames']
        )
    except Exception as e:
        print_exception('Failed to list SES policies: {}'.format(e))
        policy_names = []
    if not policy_names:
        return {}
    try:
        return await run_concurrently(
            lambda: ses_client.get_identity_policies(
                Identity=identity_name, PolicyNames=policy_names)['Policies']
        )
    except Exception as e:
        print_exception('Failed to get SES policies: {}'.format(e))
        return None
def __open_file(self, config_filename, force_write, quiet=False):
    """
    :param config_filename: Destination path
    :param force_write: Overwrite an existing file without prompting
    :param quiet: Suppress the progress message
    :return: writable file object, or None if the user declined to overwrite
    """
    if not quiet:
        print_info('Saving config...')
    if not prompt_4_overwrite(config_filename, force_write):
        return None
    try:
        config_dirname = os.path.dirname(config_filename)
        # Create the parent directory on first use
        if not os.path.isdir(config_dirname):
            os.makedirs(config_dirname)
        return open(config_filename, 'wt')
    except Exception as e:
        print_exception(e)
async def get_server_azure_ad_administrators(self, resource_group_name, server_name, subscription_id: str):
    """Return the Azure AD administrator configured on a SQL server, or None.

    A server without an AD admin yields a 404, which is not an error.
    """
    try:
        client = self.get_client(subscription_id)
        return await run_concurrently(
            lambda: client.server_azure_ad_administrators.get(
                resource_group_name, server_name))
    except CloudError as e:
        # No AD admin configured returns a 404 error:
        if e.status_code != 404:
            print_exception(
                'Failed to retrieve server azure ad administrators: {}'.format(e))
        return None
    except Exception as e:
        print_exception(
            'Failed to retrieve server azure ad administrators: {}'.format(e))
        return None
def get_s3_public_access_block(self, account_id):
    """Return the account-level S3 public access block configuration.

    :return: the PublicAccessBlockConfiguration dict; all-False defaults
             when no configuration exists; None on failure.
    """
    # We need a region to generate the client
    # However, the settings are global, so they are not region-dependent
    region = 'us-east-1'
    client = AWSFacadeUtils.get_client('s3control', self.session, region)
    try:
        s3_public_access_block = client.get_public_access_block(AccountId=account_id)
        return s3_public_access_block['PublicAccessBlockConfiguration']
    except ClientError as e:
        # Only a genuinely missing configuration means "everything off";
        # previously ANY client error (e.g. AccessDenied, throttling) was
        # silently reported as the all-False defaults.
        if e.response['Error']['Code'] == 'NoSuchPublicAccessBlockConfiguration':
            return {
                "BlockPublicAcls": False,
                "IgnorePublicAcls": False,
                "BlockPublicPolicy": False,
                "RestrictPublicBuckets": False
            }
        print_exception(
            'Failed to get the public access block configuration for the account %s: %s' % (account_id, e))
        return None
    except Exception as e:
        print_exception(
            'Failed to get the public access block configuration for the account %s: %s' % (account_id, e))
        return None
def __open_file(config_filename, force_write):
    """
    :param config_filename: Destination path for the SQLite database
    :param force_write: Overwrite an existing file without prompting
    :return: SqliteDict for the file, or None if the user declined to overwrite
    """
    if not prompt_for_overwrite(config_filename, force_write):
        return None
    try:
        config_dirname = os.path.dirname(config_filename)
        if not os.path.isdir(config_dirname):
            os.makedirs(config_dirname)
        # Start from a fresh database file
        if os.path.exists(config_filename):
            os.remove(config_filename)
        return SqliteDict(config_filename)
    except Exception as e:
        print_exception(e)
async def get_snapshots(self, region: str):
    """Return the EBS snapshots owned by this account in *region*.

    :return: list of snapshot dicts (empty on failure), each enriched with
             its create-volume permission attributes.
    """
    filters = [{'Name': 'owner-id', 'Values': [self.owner_id]}]
    try:
        snapshots = await AWSFacadeUtils.get_all_pages(
            'ec2', region, self.session, 'describe_snapshots',
            'Snapshots', Filters=filters)
    except Exception as e:
        print_exception('Failed to get snapshots: {}'.format(e))
        return []
    # The previous `finally: return snapshots` silently swallowed any
    # exception raised while fetching attributes (including task
    # cancellation); log it explicitly instead.
    try:
        await get_and_set_concurrently(
            [self._get_and_set_snapshot_attributes], snapshots, region=region)
    except Exception as e:
        print_exception('Failed to get snapshot attributes: {}'.format(e))
    return snapshots
async def get_stacks(self, region: str):
    """Return the non-deleted CloudFormation stacks in *region*.

    :return: list of stack summaries (empty on failure), each enriched
             with its description, template, policy and notifications.
    """
    try:
        stacks = await AWSFacadeUtils.get_all_pages(
            'cloudformation', region, self.session, 'list_stacks',
            'StackSummaries')
    except Exception as e:
        print_exception(f'Failed to get CloudFormation stack: {e}')
        return []
    stacks = [
        stack for stack in stacks
        if not CloudFormation._is_stack_deleted(stack)
    ]
    # The previous `finally: return stacks` silently swallowed any
    # exception raised while enriching the stacks (including task
    # cancellation); log it explicitly instead.
    try:
        await get_and_set_concurrently([
            self._get_and_set_description, self._get_and_set_template,
            self._get_and_set_policy, self._get_stack_notifications
        ], stacks, region=region)
    except Exception as e:
        print_exception(f'Failed to get CloudFormation stack details: {e}')
    return stacks
def get_tenant_id(self):
    """Return the Azure tenant ID, inferring it when not explicitly known.

    Order of preference: explicit attribute, AAD Graph token, then the
    ARM tenants API (last resort, e.g. for MSI authentication).
    """
    if self.tenant_id:
        return self.tenant_id
    if 'tenant_id' in self.aad_graph_credentials.token:
        return self.aad_graph_credentials.token['tenant_id']
    # This is a last resort, e.g. for MSI authentication
    try:
        headers = {
            'Authorization': 'Bearer {}'.format(
                self.arm_credentials.token['access_token'])
        }
        response = requests.get(
            'https://management.azure.com/tenants?api-version=2020-01-01',
            headers=headers)
        payload = response.json()
        return payload.get('value')[0].get('tenantId')
    except Exception as e:
        print_exception('Unable to infer tenant ID: {}'.format(e))
        return None
async def list_keys(self, project_id: str, location: str, keyring_name: str):
    """Return the crypto keys of a KMS key ring, or [] on failure."""
    try:
        parent = self.cloud_client.key_ring_path(project_id, location, keyring_name)
        kms_client = self._get_client()
        cryptokeys = kms_client.projects().locations().keyRings().cryptoKeys()
        request = cryptokeys.list(parent=parent)
        return await GCPFacadeUtils.get_all('cryptoKeys', request, cryptokeys)
    except Exception as e:
        print_exception(
            'Failed to retrieve KMS keys for key ring {}: {}'.format(keyring_name, e))
        return []
async def _get_and_set_s3_bucket_location(self, bucket: {}, region=None):
    """Resolve a bucket's region from its location constraint.

    Sets bucket['region'] to the resolved region, or None when the
    location could not be fetched.
    """
    client = AWSFacadeUtils.get_client('s3', self.session, region)
    try:
        location = await run_concurrently(
            lambda: client.get_bucket_location(Bucket=bucket['Name']))
    except Exception as e:
        print_exception('Failed to get bucket location for {}: {}'.format(
            bucket['Name'], e))
        location = None
    if not location:
        bucket['region'] = None
        return
    # A null LocationConstraint means the bucket lives in us-east-1
    resolved = location['LocationConstraint'] or 'us-east-1'
    # Fixes issue #59: location constraint can be either EU or eu-west-1 for Ireland...
    if resolved == 'EU':
        resolved = 'eu-west-1'
    bucket['region'] = resolved
async def get_instance_user_data(self, region: str, instance_id: str):
    """Return the decoded (and un-gzipped if needed) user data of an EC2 instance.

    :return: the user data as a UTF-8 string, or None when the attribute
             cannot be described, has no value, or cannot be decoded.
    """
    ec2_client = AWSFacadeUtils.get_client('ec2', self.session, region)
    try:
        user_data_response = await run_concurrently(
            lambda: ec2_client.describe_instance_attribute(
                Attribute='userData', InstanceId=instance_id))
    except Exception as e:
        print_exception(
            'Failed to describe EC2 instance attributes: {}'.format(e))
        return None
    if 'Value' not in user_data_response['UserData'].keys():
        return None
    # Guard the decode step (missing in the original): malformed base64 or
    # a corrupt gzip stream would otherwise raise out of this coroutine,
    # unlike the sibling implementation that logs decode failures.
    try:
        value = base64.b64decode(user_data_response['UserData']['Value'])
        if value[0:2] == b'\x1f\x8b':  # GZIP magic number
            # 32 tells zlib to auto-detect the gzip header
            return zlib.decompress(value, zlib.MAX_WBITS | 32).decode('utf-8')
        return value.decode('utf-8')
    except Exception as e:
        print_exception(
            'Unable to decode EC2 instance user data: {}'.format(e))
        return None
def parse_parameter_group(self, global_params, region, parameter_group):
    """Normalize an RDS parameter group and store it in self.parameter_groups.

    Only modifiable parameters are kept, keyed by parameter name.
    """
    parameter_group['arn'] = parameter_group.pop('DBParameterGroupArn')
    parameter_group['name'] = parameter_group.pop('DBParameterGroupName')
    api_client = api_clients[region]
    try:
        parameters = handle_truncated_response(
            api_client.describe_db_parameters,
            {'DBParameterGroupName': parameter_group['name']},
            ['Parameters'])['Parameters']
        manage_dictionary(parameter_group, 'parameters', {})
        for parameter in parameters:
            # Discard non-modifiable parameters
            if parameter['IsModifiable']:
                key = parameter.pop('ParameterName')
                parameter_group['parameters'][key] = parameter
    except Exception as e:
        print_exception(e)
        print_error('Failed fetching DB parameters for %s' % parameter_group['name'])
    # Save
    parameter_group_id = self.get_non_provider_id(parameter_group['name'])
    self.parameter_groups[parameter_group_id] = parameter_group
def process_results_file(f, region):
    """Convert a Scout results file into a list of Security Hub findings.

    :param f: open results file handle
    :param region: AWS region to stamp on the findings
    :return: list of formatted findings, or None when processing fails
    """
    try:
        formatted_findings_list = []
        results = results_file_to_dict(f)
        aws_account_id = results["account_id"]
        creation_date = datetime.datetime.strptime(
            results["last_run"]["time"], '%Y-%m-%d %H:%M:%S%z').isoformat()
        for service in results.get('service_list'):
            findings = results.get('services', {}).get(service).get('findings')
            for finding_key, finding_value in findings.items():
                # Only findings with flagged items are reported
                if finding_value.get('items'):
                    formatted_findings_list.append(
                        format_finding_to_securityhub_format(
                            aws_account_id, region, creation_date,
                            finding_key, finding_value))
        return formatted_findings_list
    except Exception as e:
        print_exception(f'Unable to process results file: {e}')
async def get_distributions(self):
    """Return the CloudFront distributions of the account, or [] on failure.

    The first list_distributions call may return before the API is ready,
    so retry a few times until the response carries ResponseMetadata.
    """
    client = AWSFacadeUtils.get_client('cloudfront', self.session)
    max_attempts = 3
    attempts_left = max_attempts
    response = None
    try:
        while attempts_left > 0:
            response = await run_concurrently(client.list_distributions)
            if 'ResponseMetadata' in response:
                break
            attempts_left -= 1
            await asyncio.sleep(0.1)  # Wait for 100ms before doing a new attempt.
        else:
            # Bug fix: the failure message previously formatted the counter
            # AFTER it reached zero ("in 0 attempts"); report the real total.
            # Returning from here (not from a finally block) also avoids
            # swallowing in-flight exceptions.
            print_exception('Failed to call aws cloudfront api in {} attempts'.format(max_attempts))
            return []
    except Exception as e:
        print_exception('Failed to call aws cloudfront api: {}'.format(e))
        return []
    try:
        return response.get('DistributionList', {}).get('Items', [])
    except Exception as e:
        print_exception(f'Failed to get CloudFront distribution lists: {e}')
        return []
async def get_response(client, request):
    """Execute an Alibaba Cloud request and merge all paginated responses.

    Follows the 'IsTruncated'/'Marker' pagination protocol, merging each
    page into a single decoded response.
    """
    try:
        first_raw = await run_concurrently(
            lambda: client.do_action_with_exception(request))
        merged = json.loads(first_raw)
        # handle truncated responses: the truncation flag always comes from
        # the most recent page, not from the merged result
        truncated = merged.get('IsTruncated', False)
        while truncated:
            request.set_Marker(merged['Marker'])
            next_raw = await run_concurrently(
                lambda: client.do_action_with_exception(request))
            next_page = json.loads(next_raw)
            truncated = next_page.get('IsTruncated', False)
            merged = await merge_responses(merged, next_page)
        return merged
    except ServerException as e:
        if False:  # TODO define exceptions to handle
            print_exception(e)
        else:
            raise
    except ClientException as e:
        if False:  # TODO define exceptions to handle
            print_exception(e)
        else:
            raise
    except Exception as e:
        print_exception(f'Unhandled exception {e} for request {request}')