async def _get_and_set_s3_bucket_versioning(self, bucket: dict):
    """Fetch the bucket's versioning configuration and set the
    'versioning_status_enabled' and 'version_mfa_delete_enabled' flags.

    Both flags are set to None (unknown) when the lookup fails.
    """
    client = AWSFacadeUtils.get_client('s3', self.session, bucket['region'])
    try:
        versioning = await run_concurrently(
            lambda: client.get_bucket_versioning(Bucket=bucket['Name']))
        bucket['versioning_status_enabled'] = self._status_to_bool(
            versioning.get('Status'))
        bucket['version_mfa_delete_enabled'] = self._status_to_bool(
            versioning.get('MFADelete'))
    except Exception as e:
        # Previously swallowed silently; log so permission/API errors are
        # visible, consistent with the other bucket getters in this project.
        print_exception(
            'Failed to get versioning configuration for {}: {}'.format(
                bucket['Name'], e))
        bucket['versioning_status_enabled'] = None
        bucket['version_mfa_delete_enabled'] = None
async def _get_and_set_key_manager(self, volume: dict, region: str):
    """Resolve who manages the KMS key encrypting this EBS volume and store
    it under volume['KeyManager'].

    Sets None when the volume has no 'KmsKeyId' (unencrypted) or when the
    KMS lookup fails. (Annotation fixed: ``{}`` is a dict literal, not a type.)
    """
    kms_client = AWSFacadeUtils.get_client('kms', self.session, region)
    if 'KmsKeyId' in volume:
        key_id = volume['KmsKeyId']
        try:
            volume['KeyManager'] = await run_concurrently(
                lambda: kms_client.describe_key(KeyId=key_id)[
                    'KeyMetadata']['KeyManager'])
        except Exception as e:
            print_exception('Failed to describe KMS key: {}'.format(e))
            volume['KeyManager'] = None
    else:
        volume['KeyManager'] = None
async def _get_and_set_snapshot_attributes(self, snapshot: dict, region: str):
    """Fetch the RDS snapshot's attributes and store them under
    snapshot['Attributes'] ({} when the response contains none).

    On failure the error is logged and 'Attributes' is left unset.
    """
    client = AWSFacadeUtils.get_client('rds', self.session, region)
    try:
        attributes = await run_concurrently(
            lambda: client.describe_db_snapshot_attributes(
                DBSnapshotIdentifier=snapshot['DBSnapshotIdentifier'])[
                'DBSnapshotAttributesResult'])
        # dict.get with a default replaces the membership-test conditional.
        snapshot['Attributes'] = attributes.get('DBSnapshotAttributes', {})
    except Exception as e:
        print_exception(
            'Failed to describe RDS snapshot attributes: {}'.format(e))
async def _get_and_set_s3_bucket_logging(self, bucket: dict):
    """Record the bucket's access-logging destination under bucket['logging'].

    Value is '<target-bucket>/<target-prefix>' when logging is enabled,
    'Disabled' when it is off, and 'Unknown' when the lookup fails.
    """
    client = AWSFacadeUtils.get_client('s3', self.session, bucket['region'])
    try:
        logging = await run_concurrently(
            lambda: client.get_bucket_logging(Bucket=bucket['Name']))
    except Exception as e:
        print_exception('Failed to get logging configuration for %s: %s' % (bucket['Name'], e))
        bucket['logging'] = 'Unknown'
    else:
        # 'LoggingEnabled' is present only when server access logging is on.
        if 'LoggingEnabled' in logging:
            bucket['logging'] = \
                logging['LoggingEnabled']['TargetBucket'] + '/' + logging['LoggingEnabled']['TargetPrefix']
        else:
            bucket['logging'] = 'Disabled'
async def _get_stack_notifications(self, stack: dict, region: str):
    """Copy the CloudFormation stack's notification ARNs onto *stack*.

    Bug fix: describe_stacks(...)['Stacks'] is a *list* of stack dicts; the
    original tested ``'NotificationARNs' in`` that list, which can never be
    true, so NotificationARNs was never set. We must look at the first
    (and, with StackName given, only) returned stack.
    """
    client = AWSFacadeUtils.get_client('cloudformation', self.session, region)
    try:
        stacks = await run_concurrently(
            lambda: client.describe_stacks(StackName=stack['StackName'])[
                'Stacks'])
    except Exception as e:
        print_exception(f'Failed to describe CloudFormation stack: {e}')
    else:
        if stacks and 'NotificationARNs' in stacks[0]:
            stack['NotificationARNs'] = stacks[0]['NotificationARNs']
async def _get_queue_attributes(self, queue_url: str, region: str, attribute_names: list):
    """Return a ``(queue_url, attributes)`` tuple for one SQS queue.

    Logs and re-raises on failure so the caller can drop the failed item.
    (Annotation fixed: ``[]`` is a list literal, not a type.)
    """
    sqs_client = AWSFacadeUtils.get_client('sqs', self.session, region)
    try:
        queue_attributes = await run_concurrently(
            lambda: sqs_client.get_queue_attributes(
                QueueUrl=queue_url, AttributeNames=attribute_names)[
                'Attributes'])
    except Exception as e:
        print_exception('Failed to get SQS queue attributes: {}'.format(e))
        raise
    return queue_url, queue_attributes
async def _get_and_set_instance_clusters(self, instance: dict, region: str):
    """For an RDS instance that belongs to a cluster, copy the parent
    cluster's 'MultiAZ' flag onto the instance.

    Instances without a 'DBClusterIdentifier' are left untouched; lookup
    failures (including an empty DBClusters list) are logged.
    """
    client = AWSFacadeUtils.get_client('rds', self.session, region)
    if 'DBClusterIdentifier' in instance:
        cluster_id = instance['DBClusterIdentifier']
        try:
            clusters = await run_concurrently(
                lambda: client.describe_db_clusters(
                    DBClusterIdentifier=cluster_id))
            cluster = clusters['DBClusters'][0]
            instance['MultiAZ'] = cluster['MultiAZ']
        except Exception as e:
            print_exception(
                'Failed to describe RDS clusters: {}'.format(e))
async def _get_and_set_instance_tags(self, instance: dict, region: str):
    """Fetch the RDS instance's tags and store them as a {Key: Value} dict
    under instance['Tags'] ({} on unexpected errors).

    A ClientError other than NoSuchTagSet is logged without touching 'Tags'.
    """
    client = AWSFacadeUtils.get_client('rds', self.session, region)
    account_id = get_aws_account_id(self.session)
    # list_tags_for_resource requires the full ARN, built here explicitly.
    resource_arn = f"arn:aws:rds:{region}:{account_id}:db:{instance['DBInstanceIdentifier']}"
    try:
        instance_tagset = await run_concurrently(
            lambda: client.list_tags_for_resource(ResourceName=resource_arn))
        instance['Tags'] = {x['Key']: x['Value'] for x in instance_tagset['TagList']}
    except ClientError as e:
        if e.response['Error']['Code'] != 'NoSuchTagSet':
            print_exception('Failed to get db instance tags for {}: {}'.format(instance['DBInstanceIdentifier'], e))
    except Exception as e:
        print_exception('Failed to get db instance tags for {}: {}'.format(instance['DBInstanceIdentifier'], e))
        instance['Tags'] = {}
async def get_instance_user_data(self, region: str, instance_id: str):
    """Return the EC2 instance's user data decoded to a UTF-8 string.

    Returns None when the instance has no user data or the attribute
    lookup fails.
    """
    ec2_client = AWSFacadeUtils.get_client('ec2', self.session, region)
    try:
        user_data_response = await run_concurrently(
            lambda: ec2_client.describe_instance_attribute(Attribute='userData', InstanceId=instance_id))
    except Exception as e:
        print_exception(
            'Failed to describe EC2 instance attributes: {}'.format(e))
        return None
    # Membership test on the dict directly (the '.keys()' view is redundant).
    if 'Value' not in user_data_response['UserData']:
        return None
    # The API returns user data base64-encoded.
    return base64.b64decode(user_data_response['UserData']['Value']).decode('utf-8')
async def _get_project_details(self, project: str, region: str):
    """Return the batch_get_projects response for *project* with the
    response metadata stripped ({} when the call fails)."""
    codebuild_client = AWSFacadeUtils.get_client('codebuild', self.session, region)
    try:
        project_details = await run_concurrently(
            lambda: codebuild_client.batch_get_projects(names=[project]))
    except Exception as e:
        print_exception(f'Failed to get CodeBuild project details: {e}')
        return {}
    # pop() with a default so a missing key cannot raise KeyError.
    project_details.pop('ResponseMetadata', None)
    project_details.pop('projectsNotFound', None)
    return project_details
async def get_buckets(self):
    """Return all S3 buckets, fully enriched with per-bucket details.

    Listing is attempted region by region until one call succeeds, then the
    buckets are enriched concurrently (location first, then the remaining
    attributes). Returns [] when every region fails.
    """
    try:
        # If there are regions specified, try for each of them until one works.
        # Otherwise, try all the available regions until one works.
        # This is required in case there's an IAM policy that denies access to APIs on a regional basis,
        # as per https://github.com/nccgroup/ScoutSuite/issues/727
        buckets = []
        exception = None
        region_list = self.regions if self.regions else await run_concurrently(
            lambda: self.session.get_available_regions('s3'))
        for region in region_list:
            try:
                client = AWSFacadeUtils.get_client('s3', self.session, region)
                buckets = await run_concurrently(
                    lambda: client.list_buckets()['Buckets'])
            except Exception as e:
                # Remember the last failure; only reported if no region works.
                exception = e
            else:
                exception = None
                # Fix for https://github.com/nccgroup/ScoutSuite/issues/916#issuecomment-728783965
                break
        if not buckets:
            if exception:
                print_exception(f'Failed to list buckets: {exception}')
            return []
    except Exception as e:
        print_exception(f'Failed to list buckets: {e}')
        return []
    else:
        # We need first to retrieve bucket locations before retrieving bucket details
        # NOTE: `region` here is the loop variable left over from the listing
        # loop above, i.e. the first region that answered — intentional reuse.
        await get_and_set_concurrently(
            [self._get_and_set_s3_bucket_location], buckets, region=region)
        # Then we can retrieve bucket details concurrently
        await get_and_set_concurrently([
            self._get_and_set_s3_bucket_logging,
            self._get_and_set_s3_bucket_versioning,
            self._get_and_set_s3_bucket_webhosting,
            self._get_and_set_s3_bucket_default_encryption,
            self._get_and_set_s3_acls,
            self._get_and_set_s3_bucket_policy,
            self._get_and_set_s3_bucket_tags,
            self._get_and_set_s3_bucket_block_public_access
        ], buckets)
        # Non-async post-processing
        for bucket in buckets:
            self._set_s3_bucket_secure_transport(bucket)
        # Try to update CreationDate of all buckets with the correct values from 'us-east-1'
        self._get_and_set_s3_bucket_creationdate(buckets)
        return buckets
async def get_subnets(self, region: str, vpc: str):
    """Return the subnets of *vpc* in *region*, each enriched with its flow
    logs; None when the describe call fails."""
    ec2_client = AWSFacadeUtils.get_client('ec2', self.session, region)
    vpc_filter = [{'Name': 'vpc-id', 'Values': [vpc]}]
    try:
        subnet_list = await run_concurrently(
            lambda: ec2_client.describe_subnets(Filters=vpc_filter)['Subnets'])
    except Exception as e:
        print_exception('Failed to describe EC2 subnets: {}'.format(e))
        return None
    await get_and_set_concurrently(
        [self._get_and_set_subnet_flow_logs], subnet_list, region=region)
    return subnet_list
async def _describe_secrets(self, secret: dict, region: str):
    """Return the full Secrets Manager description of *secret* with the
    response metadata stripped, falling back to the original record when
    the describe call fails.

    Annotation fixed: the parameter was declared ``str`` but is accessed as
    a mapping (``secret.get('ARN')``).
    """
    client = AWSFacadeUtils.get_client('secretsmanager', self.session, region)
    try:
        secret_description = await run_concurrently(
            lambda: client.describe_secret(SecretId=secret.get('ARN')))
    except Exception as e:
        print_exception(
            'Failed to get Secrets Manager secret details: {}'.format(e))
        return secret
    secret_description.pop('ResponseMetadata')
    return secret_description
async def _get_policies(self, load_balancer: dict, region: str):
    """Return the policy descriptions attached to *load_balancer*, or []
    when it has no policies or the lookup fails."""
    policy_names = load_balancer['policy_names']
    if not policy_names:
        return []
    elb_client = AWSFacadeUtils.get_client('elb', self.session, region)
    try:
        response = await run_concurrently(
            lambda: elb_client.describe_load_balancer_policies(
                LoadBalancerName=load_balancer['LoadBalancerName'],
                PolicyNames=policy_names))
        return response['PolicyDescriptions']
    except Exception as e:
        print_exception(f'Failed to retrieve load balancer policies: {e}')
        return []
async def get_security_groups(self, region):
    """Return every ElastiCache cache security group in *region*.

    Recent accounts cannot use security groups at this level; the API then
    raises InvalidParameterValueException and we return [] instead.
    """
    client = AWSFacadeUtils.get_client('elasticache', self.session, region)
    try:
        return await AWSFacadeUtils.get_all_pages(
            'elasticache', region, self.session,
            'describe_cache_security_groups', 'CacheSecurityGroups')
    except client.exceptions.InvalidParameterValueException:
        # Expected on accounts where this feature is unavailable.
        return []
async def _get_and_set_s3_bucket_webhosting(self, bucket: dict):
    """Set bucket['web_hosting_enabled'] from the bucket's website
    configuration (False when none exists; left unset on other errors)."""
    client = AWSFacadeUtils.get_client('s3', self.session, bucket['region'])
    try:
        result = await run_concurrently(
            lambda: client.get_bucket_website(Bucket=bucket['Name']))
        bucket['web_hosting_enabled'] = 'IndexDocument' in result
    except Exception as e:
        # The API signals "no website configuration" with an error rather
        # than an empty response.
        if "NoSuchWebsiteConfiguration" in str(e):
            bucket['web_hosting_enabled'] = False
        else:
            print_exception(
                'Failed to get web hosting configuration for {}: {}'.
                format(bucket['Name'], e))
async def _get_and_set_user_login_profile(self, user: dict):
    """Attach the IAM user's login profile under user['LoginProfile'].

    Left unset when the user has no console password (NoSuchEntity) or on
    failure.
    """
    client = AWSFacadeUtils.get_client('iam', self.session)
    try:
        user['LoginProfile'] = await run_concurrently(
            lambda: client.get_login_profile(UserName=user['UserName'])[
                'LoginProfile'])
    except ClientError as e:
        if e.response["Error"]["Code"] == "NoSuchEntity":
            # If the user has not been assigned a password, the operation
            # returns a 404 (NoSuchEntity) error — expected, not a failure.
            pass
        else:
            print_exception('Failed to get login profile: {}'.format(e))
    except Exception as e:
        print_exception('Failed to get login profile: {}'.format(e))
async def _get_and_set_user_tags(self, user: dict):
    """Fetch the IAM user's tags and store them as a {Key: Value} dict
    under user['Tags']; left unset on failure.

    The separate ClientError handler duplicated the generic one, so the two
    are collapsed; the manual accumulation loop becomes a comprehension.
    """
    client = AWSFacadeUtils.get_client('iam', self.session)
    try:
        tag_list = await run_concurrently(lambda: client.list_user_tags(
            UserName=user['UserName'])['Tags'])
        user['Tags'] = {tag['Key']: tag['Value'] for tag in tag_list}
    except Exception as e:
        print_exception('Failed to get user tags: {}'.format(e))
async def get_credential_reports(self):
    """Generate, download and parse the IAM credential report.

    Returns a list of dicts, one per account row of the CSV report, or []
    when generation or download fails.

    Bug fix: the "failed to complete ... in {} attempts" message formatted
    the decremented counter (always 0 at that point); it now reports the
    configured number of attempts.
    """
    client = AWSFacadeUtils.get_client('iam', self.session)
    # When no credential report exists, we first need to initiate the creation of a new report by calling
    # client.generate_credential_report and then check for COMPLETE status before trying to download it:
    max_attempts = 3
    report_generated, n_attempts = False, max_attempts
    try:
        while not report_generated and n_attempts > 0:
            response = await run_concurrently(
                client.generate_credential_report)
            if response['State'] == 'COMPLETE':
                report_generated = True
            else:
                n_attempts -= 1
                await asyncio.sleep(
                    0.1)  # Wait for 100ms before doing a new attempt.
    except Exception as e:
        print_exception(
            'Failed to generate credential report: {}'.format(e))
        return []
    finally:
        if not report_generated and n_attempts == 0:
            print_exception(
                'Failed to complete credential report generation in {} attempts'
                .format(max_attempts))
            return []
    try:
        report = await run_concurrently(
            lambda: client.get_credential_report()['Content'])
        # The report is a CSV string. The first row contains the name of each column. The next rows
        # each represent an individual account. This algorithm provides a simple initial parsing.
        lines = report.splitlines()
        keys = lines[0].decode('utf-8').split(',')
        credential_reports = []
        for line in lines[1:]:
            values = line.decode('utf-8').split(',')
            credential_reports.append(dict(zip(keys, values)))
        return credential_reports
    except Exception as e:
        print_exception(
            'Failed to download credential report: {}'.format(e))
        return []
async def get_projects(self, region: str):
    """Return detailed CodeBuild project records for *region*; [] when the
    listing fails or there are no projects."""
    codebuild_client = AWSFacadeUtils.get_client('codebuild', self.session, region)
    try:
        project_names = await run_concurrently(
            lambda: codebuild_client.list_projects()['projects'])
    except Exception as e:
        print_exception(f'Failed to get CodeBuild projects: {e}')
        return []
    if not project_names:
        return []
    return await map_concurrently(self._get_project_details, project_names,
                                  region=region)
async def _get_and_set_s3_bucket_policy(self, bucket: dict):
    """Attach the bucket's policy, parsed from JSON, under bucket['policy'].

    'policy' is left unset when the bucket has no policy (NoSuchBucketPolicy)
    or on failure.
    """
    client = AWSFacadeUtils.get_client('s3', self.session, bucket['region'])
    try:
        bucket_policy = await run_concurrently(
            lambda: client.get_bucket_policy(Bucket=bucket['Name']))
        bucket['policy'] = json.loads(bucket_policy['Policy'])
    except ClientError as e:
        # NoSuchBucketPolicy simply means the bucket has no policy attached.
        if e.response['Error']['Code'] != 'NoSuchBucketPolicy':
            print_exception('Failed to get bucket policy for %s: %s' % (bucket['Name'], e))
    except Exception as e:
        print_exception('Failed to get bucket policy for %s: %s' % (bucket['Name'], e))
        # NOTE(review): setting 'grantees' (an ACL concept) in the *policy*
        # getter looks like a copy-paste from the ACL handler — presumably
        # this was meant to reset 'policy'; verify against callers.
        bucket['grantees'] = {}
async def get_trails(self, region):
    """Return the CloudTrail trails in *region*, each enriched with its
    status and event selectors; [] when the describe call fails.

    Fix: the original returned via a ``return`` inside ``finally``, which
    silently swallows any unexpected exception raised in the ``else`` block
    (pylint W0150); the returns are now explicit on each path.
    """
    client = AWSFacadeUtils.get_client('cloudtrail', self.session, region)
    try:
        trails = await run_concurrently(
            lambda: client.describe_trails()['trailList'])
    except Exception as e:
        print_exception(f'Failed to describe CloudTrail trail: {e}')
        return []
    await get_and_set_concurrently(
        [self._get_and_set_status, self._get_and_set_selectors],
        trails, region=region)
    return trails
async def _get_and_set_s3_bucket_default_encryption(self, bucket: dict):
    """Set bucket['default_encryption_enabled'] (True/False/None) from the
    bucket's default-encryption configuration.

    None means the lookup failed for a reason other than "not configured";
    on unexpected exceptions bucket['default_encryption'] is set to
    'Unknown'.
    """
    bucket_name = bucket['Name']
    client = AWSFacadeUtils.get_client('s3', self.session, bucket['region'])
    try:
        # Only the success/failure of the call matters, not its payload.
        await run_concurrently(
            lambda: client.get_bucket_encryption(Bucket=bucket_name))
        bucket['default_encryption_enabled'] = True
    except ClientError as e:
        # This error code means default encryption is simply not configured.
        if 'ServerSideEncryptionConfigurationNotFoundError' in e.response['Error']['Code']:
            bucket['default_encryption_enabled'] = False
        else:
            bucket['default_encryption_enabled'] = None
            print_exception('Failed to get encryption configuration for %s: %s' % (bucket_name, e))
    except Exception as e:
        print_exception('Failed to get encryption configuration for %s: %s' % (bucket_name, e))
        bucket['default_encryption'] = 'Unknown'
async def _get_and_set_s3_bucket_block_public_access(self, bucket: dict):
    """Attach the bucket's PublicAccessBlock configuration under
    bucket['public_access_block_configuration']; left unset when the bucket
    has no such configuration.

    Fix: the original swallowed *every* ClientError (e.g. AccessDenied)
    silently; only the "no configuration" error code is expected, anything
    else is now logged.
    """
    client = AWSFacadeUtils.get_client('s3', self.session, bucket['region'])
    try:
        bucket_public_access_block_conf = await run_concurrently(
            lambda: client.get_public_access_block(Bucket=bucket['Name']))
        bucket[
            'public_access_block_configuration'] = bucket_public_access_block_conf[
            'PublicAccessBlockConfiguration']
    except ClientError as e:
        # No such configuration found for the bucket, nothing to be done
        if e.response['Error']['Code'] != 'NoSuchPublicAccessBlockConfiguration':
            print_exception(
                'Failed to get the public access block configuration for {}: {}'
                .format(bucket['Name'], e))
    except Exception as e:
        print_exception(
            'Failed to get the public access block configuration for {}: {}'
            .format(bucket['Name'], e))
async def _get_and_set_s3_bucket_versioning(self, bucket: dict):
    """Fetch the bucket's versioning configuration and set the
    'versioning_status_enabled' and 'version_mfa_delete_enabled' flags.

    Both flags are set to None (unknown) when the lookup fails.
    (Annotation fixed: ``{}`` is a dict literal, not a type.)
    """
    client = AWSFacadeUtils.get_client('s3', self.session, bucket['region'])
    try:
        versioning = await run_concurrently(
            lambda: client.get_bucket_versioning(Bucket=bucket['Name']))
        bucket['versioning_status_enabled'] = self._status_to_bool(
            versioning.get('Status'))
        bucket['version_mfa_delete_enabled'] = self._status_to_bool(
            versioning.get('MFADelete'))
    except Exception as e:
        print_exception(
            'Failed to get versioning configuration for {}: {}'.format(
                bucket['Name'], e))
        bucket['versioning_status_enabled'] = None
        bucket['version_mfa_delete_enabled'] = None
async def _get_and_set_secret_policy(self, secret: dict, region: str):
    """Attach the secret's resource policy, parsed from JSON, under
    secret['policy'] ({} when the secret has no policy or the lookup fails)."""
    client = AWSFacadeUtils.get_client('secretsmanager', self.session, region)
    try:
        policy = await run_concurrently(
            lambda: client.get_resource_policy(SecretId=secret.get('ARN')))
        # 'ResourcePolicy' is a JSON string; absent when no policy is set.
        policy_json = policy.get('ResourcePolicy')
        secret['policy'] = json.loads(policy_json) if policy_json else {}
    except Exception as e:
        print_exception(
            'Failed to get Secrets Manager secret policy: {}'.format(e))
        secret['policy'] = {}
async def get_queues(self, region: str, attribute_names: list):
    """Return ``(queue_url, attributes)`` tuples for every SQS queue in
    *region*; [] when listing fails or there are no queues.

    (Annotation fixed: ``[]`` is a list literal, not a type.)
    """
    sqs_client = AWSFacadeUtils.get_client('sqs', self.session, region)
    try:
        raw_queues = await run_concurrently(sqs_client.list_queues)
    except Exception as e:
        print_exception('Failed to list SQS queues: {}'.format(e))
        return []
    # 'QueueUrls' is omitted entirely when the account has no queues.
    if 'QueueUrls' not in raw_queues:
        return []
    return await map_concurrently(self._get_queue_attributes,
                                  raw_queues['QueueUrls'], region=region,
                                  attribute_names=attribute_names)
async def _get_table(self, table_name: str, region: str):
    """Describe one DynamoDB table and enrich it with backup, continuous
    backup and tag details; logs and re-raises when the describe fails."""
    client = AWSFacadeUtils.get_client('dynamodb', self.session, region)
    try:
        table = await run_concurrently(
            lambda: client.describe_table(TableName=table_name)['Table'])
    except Exception as e:
        print_exception('Failed to get DynamoDB table: {}'.format(e))
        raise
    enrichers = [
        self._get_and_set_backup,
        self._get_and_set_continuous_backups,
        self._get_and_set_tags,
    ]
    await get_and_set_concurrently(enrichers, [table], region=region)
    return table
async def _get_and_set_s3_bucket_location(self, bucket: dict, region=None):
    """Resolve the bucket's home region and store it under bucket['region']
    (None when the location lookup fails).

    (Annotation fixed: ``{}`` is a dict literal, not a type.)
    """
    client = AWSFacadeUtils.get_client('s3', self.session, region)
    try:
        location = await run_concurrently(lambda: client.get_bucket_location(Bucket=bucket['Name']))
    except Exception as e:
        print_exception('Failed to get bucket location for {}: {}'.format(bucket['Name'], e))
        location = None
    if location:
        # An empty LocationConstraint denotes the legacy default, us-east-1.
        region = location['LocationConstraint'] if location['LocationConstraint'] else 'us-east-1'
        # Fixes issue #59: location constraint can be either EU or eu-west-1 for Ireland...
        if region == 'EU':
            region = 'eu-west-1'
    else:
        region = None
    bucket['region'] = region
async def _get_and_set_s3_bucket_tags(self, bucket: dict):
    """Fetch the bucket's tags and store them as a {Key: Value} dict under
    bucket['tags'] ({} on unexpected errors).

    A ClientError other than NoSuchTagSet is logged without touching 'tags'.
    """
    client = AWSFacadeUtils.get_client('s3', self.session, bucket['region'])
    try:
        bucket_tagset = await run_concurrently(
            lambda: client.get_bucket_tagging(Bucket=bucket['Name']))
        bucket['tags'] = {
            x['Key']: x['Value']
            for x in bucket_tagset['TagSet']
        }
    except ClientError as e:
        # NoSuchTagSet simply means the bucket is untagged.
        if e.response['Error']['Code'] != 'NoSuchTagSet':
            print_exception('Failed to get bucket tags for {}: {}'.format(
                bucket['Name'], e))
    except Exception as e:
        print_exception('Failed to get bucket tags for {}: {}'.format(
            bucket['Name'], e))
        bucket['tags'] = {}