else: subscription_data[attr] = False else: subscription_data[attr] = value return subscription_data def _import_subscriptions(proxy: ServiceProxy): subs_resp = proxy.list('list_subscriptions') if subs_resp is not None: for subscription_data in subs_resp[1]['Subscriptions']: if subscription_data['SubscriptionArn'] == 'PendingConfirmation': continue if subscription_data['SubscriptionArn'] == 'Deleted': continue yield 'Subscription', _import_subscription(proxy, subscription_data) def _import_sns_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: _log.info(f'importing SNS in {region}') if resource_gate(spec, 'Topic'): yield from _import_topics(proxy) if resource_gate(spec, 'Subscription'): yield from _import_subscriptions(proxy) SVC = RegionalService('sns', _import_sns_region)
vaultName=vault_name, accountId='-') if policy_resp is not None: policy_text = policy_resp.get('policy', {}).get('Policy') if policy_text is not None: vault['Policy'] = json.loads(policy_text) tags_resp = proxy.get('list_tags_for_vault', vaultName=vault_name, accountId='-') if tags_resp is not None: vault['Tags'] = tags_resp['Tags'] return vault def _import_vaults(proxy: ServiceProxy) -> Iterator[Tuple[str, Any]]: vaults_resp = proxy.list('list_vaults', accountId='-') if vaults_resp is not None: vault_list = vaults_resp[1]['VaultList'] for vault in vault_list: yield 'Vault', _import_vault(proxy, vault) def _import_glacier_region(proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Iterator[Tuple[str, Any]]: _log.info(f'import glacier in {region}') if resource_gate(spec, 'Vault'): yield from _import_vaults(proxy) SVC = RegionalService('glacier', _import_glacier_region)
def _import_nodegroups(
    proxy: ServiceProxy,
    cluster_name: str) -> Iterator[Tuple[str, Dict[str, Any]]]:
  """Yield ('Nodegroup', data) for every nodegroup of *cluster_name*."""
  nodegroups_resp = proxy.list('list_nodegroups', name=cluster_name)
  if nodegroups_resp is not None:
    nodegroup_names = nodegroups_resp[1].get('nodegroups', [])
    for nodegroup_name in nodegroup_names:
      yield 'Nodegroup', _import_nodegroup(proxy, cluster_name, nodegroup_name)


def _import_clusters(proxy: ServiceProxy, region: str, spec: ServiceSpec):
  """Yield every EKS cluster, plus its nodegroups when gated in."""
  clusters_resp = proxy.list('list_clusters')
  if clusters_resp is not None:
    cluster_names = clusters_resp[1].get('clusters', [])
    for cluster_name in cluster_names:
      yield 'Cluster', _import_cluster(proxy, cluster_name)
      if resource_gate(spec, 'Nodegroup'):
        yield from _import_nodegroups(proxy, cluster_name)


def _import_eks_region(
    proxy: ServiceProxy, region: str,
    spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]:
  """Import the gated EKS resource types for a single region."""
  if resource_gate(spec, 'Cluster'):
    _log.info(f'importing eks clusters {region}')
    yield from _import_clusters(proxy, region, spec)


SVC = RegionalService('eks', _import_eks_region)
resources_resp = proxy.get('describe_environment_resources', EnvironmentName=environment['EnvironmentName']) if resources_resp is not None: environment.update(resources_resp['EnvironmentResources']) else: _log.warn(f'Failed to get environment resources for {arn}') return environment def _import_environments(proxy: ServiceProxy): environments_resp = proxy.list('describe_environments') if environments_resp is not None: environments = environments_resp[1]['Environments'] for environment in environments: yield 'Environment', _import_environment(proxy, environment) def _import_elasticbeanstalk_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: _log.info(f'importing ElasticBeanstalk {region}') if resource_gate(spec, 'Application'): yield from _import_applications(proxy) if resource_gate(spec, 'ApplicationVersion'): yield from _import_application_versions(proxy) if resource_gate(spec, 'Environment'): yield from _import_environments(proxy) SVC = RegionalService('elasticbeanstalk', _import_elasticbeanstalk_region)
raise GFNoAccess('sqs', 'get_queue_attributes') attrs = attrs_resp['Attributes'] attrs['url'] = queue_url attrs['Policy'] = json.loads(attrs.get('Policy', 'null')) tags_resp = proxy.get('list_queue_tags', QueueUrl=queue_url) if tags_resp is not None: attrs['Tags'] = tags_resp['Tags'] return attrs def _import_queues(proxy: ServiceProxy, region: str): queues_resp = proxy.list('list_queues') if queues_resp is not None: queue_urls = queues_resp[1].get('QueueUrls', []) for queue_url in queue_urls: try: yield 'Queue', _import_queue(proxy, queue_url) except GFNoAccess as e: _log.error(f'sqs error {region}', exc_info=e) def _import_sqs_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: if resource_gate(spec, 'Queue'): _log.info(f'importing sqs Queues in {region}') yield from _import_queues(proxy, region) SVC = RegionalService('sqs', _import_sqs_region)
yield from _import_tasks(proxy, cluster_arn) def _import_task_definition(proxy: ServiceProxy, definition_arn: str): definition = proxy.get('describe_task_definition', taskDefinition=definition_arn, include=['TAGS'])['taskDefinition'] return definition def _import_task_definitions(proxy: ServiceProxy): definitions_resp = proxy.list('list_task_definitions') if definitions_resp is not None: definition_arns = definitions_resp[1].get('taskDefinitionArns', []) for definition_arn in definition_arns: yield 'TaskDefinition', _import_task_definition( proxy, definition_arn) def _import_ecs_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: if resource_gate(spec, 'Cluster'): _log.info(f'importing ecs clusters') yield from _import_clusters(proxy, region, spec) if resource_gate(spec, 'TaskDefinition'): yield from _import_task_definitions(proxy) SVC = RegionalService('ecs', _import_ecs_region)
list_result = proxy.list(resource, **kwargs) if list_result is not None: if is_tags: tag_descriptions = list_result[1]['TagDescriptions'] if len(tag_descriptions) > 0: result['Tags'] = tag_descriptions[0]['Tags'] else: result['Tags'] = [] else: key = canonical_name[len('DescribeLoadBalancer'):] result[key] = list_result[1] return result def _is_v2(arn) -> bool: return ':loadbalancer/net/' in arn or ':loadbalancer/app/' in arn def _import_elb_region(proxy: ServiceProxy, region: str, spec: ServiceSpec): if resource_gate(spec, 'LoadBalancer'): _log.info(f'Importing {region} load balancers') result = proxy.list('describe_load_balancers') if result is not None: elbs = result[1] for elb in elbs.get('LoadBalancerDescriptions', []): if not _is_v2(elb['LoadBalancerName']): yield 'LoadBalancer', import_elb(proxy, elb) SVC = RegionalService('elb', _import_elb_region)
policies = {} for i in range(0, len(policy_names), 20): names = policy_names[i:i + 20] policy_resp = proxy.get('get_identity_policies', Identity=identity_name, PolicyNames=names) policy_map = policy_resp.get('Policies', {}) for name, policy_string in policy_map.items(): policies[name] = json.loads(policy_string) identity['Policies'] = policies return identity def _import_identities(proxy: ServiceProxy, region: str): identities_resp = proxy.list('list_identities') if identities_resp is not None: identities = identities_resp[1]['Identities'] for identity in identities: yield 'Identity', _import_identity(proxy, identity) def _import_ses_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: if resource_gate(spec, 'Identity'): _log.info(f'importing identities {region}') yield from _import_identities(proxy, region) SVC = RegionalService('ses', _import_ses_region)
repositoryName=name, registryId=registry) if policy_resp is not None: repository['Policy'] = json.loads(policy_resp['policyText']) except botocore.exceptions.ClientError as e: code = e.response.get('Error', {}).get('Code') if code == 'RepositoryPolicyNotFoundException': repository['Policy'] = None else: raise return repository def _import_repositories(proxy: ServiceProxy): repositories_resp = proxy.list('describe_repositories') if repositories_resp is not None: repositories = repositories_resp[1]['repositories'] for repository in repositories: yield 'Repository', _import_repository(proxy, repository) def _import_ecr_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: _log.info(f'importing repositories {region}') if resource_gate(spec, 'Repository'): yield from _import_repositories(proxy) SVC = RegionalService('ecr', _import_ecr_region)
tables_resp = proxy.list('list_tables') if tables_resp is not None: table_names = tables_resp[1].get('TableNames', []) for table_name in table_names: try: yield 'Table', _import_table(proxy, table_name) except GFNoAccess as e: _log.error(f'dynamodb error {region}', exc_info=e) def _import_backups(proxy: ServiceProxy, region: str): backups_resp = proxy.list('list_backups') if backups_resp is not None: backups = backups_resp[1]['BackupSummaries'] for backup in backups: yield 'Backup', backup def _import_dynamodb_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: if resource_gate(spec, 'Table'): _log.info(f'importing dynamodb tables {region}') yield from _import_tables(proxy, region) if resource_gate(spec, 'Backup'): _log.info(f'importing dynamodb backups {region}') yield from _import_backups(proxy, region) SVC = RegionalService('dynamodb', _import_dynamodb_region)
_log = logging.getLogger(__name__)


def _import_secret(proxy: ServiceProxy, secret: Dict[str, Any]):
  """Attach the resource policy (when one exists) to *secret*."""
  arn = secret['ARN']
  policy_resp = proxy.get('get_resource_policy', SecretId=arn)
  if policy_resp is not None:
    policy_text = policy_resp.get('ResourcePolicy')
    if policy_text is not None:
      secret['Policy'] = json.loads(policy_text)
  return secret


def _import_secrets(proxy: ServiceProxy) -> Iterator[Tuple[str, Any]]:
  """Yield ('Secret', data) for every Secrets Manager secret."""
  secrets_resp = proxy.list('list_secrets')
  if secrets_resp is not None:
    for secret in secrets_resp[1]['SecretList']:
      yield 'Secret', _import_secret(proxy, secret)


def _import_secretsmanager_region(
    proxy: ServiceProxy, region: str,
    spec: ServiceSpec) -> Iterator[Tuple[str, Any]]:
  """Import the gated Secrets Manager resource types for a region."""
  _log.info(f'import secretsmanager in {region}')
  if resource_gate(spec, 'Secret'):
    yield from _import_secrets(proxy)


SVC = RegionalService('secretsmanager', _import_secretsmanager_region)
if result is not None: if resource == 'describe_instances': _add_user_data(proxy, result[1]) elif resource == 'describe_snapshots': _add_launch_permissions(proxy, result[1]) elif resource == 'describe_images': _add_image_attributes(proxy, result[1]) elif resource == 'describe_security_groups': _add_security_group_references(proxy, result[1]) yield result[0], result[1] _log.info(f'done with {resource}') if resource_gate(spec, 'Defaults'): yield from _synthesize_defaults(proxy, region) SVC = RegionalService('ec2', _import_ec2_region) def add_amis_to_import_job(proxy: Proxy, writer: ImportWriter, ps: PathStack, region: str, amis: List[str]) -> str: ps = ps.scope(region) service_proxy = proxy.service('ec2', region) result = service_proxy.list( 'describe_images', ImageIds=amis, # Remove the default filters Filters=[]) _log.debug(f'describe images result {result}') if result is not None: resource_name = result[0] raw_resources = result[1]
layer_version['Policy'] = json.loads(policy_string) else: layer_version['Policy'] = None layer_version['PolicyRevisionId'] = policy.get('RevisionId') layer_version['Name'] = layer_name yield 'LayerVersion', layer_version def _import_layers(proxy: ServiceProxy): layers_resp = proxy.list('list_layers') if layers_resp is not None: layer_versions = layers_resp[1]['Layers'] for layer_version in layer_versions: layer_arn = layer_version['LayerArn'] yield from _import_layer(proxy, layer_arn, layer_version['LayerName']) def _import_lambda_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: if resource_gate(spec, 'Function'): _log.info(f'Importing functions in {region}') yield from _import_functions(proxy, spec) if resource_gate(spec, 'LayerVersion'): yield from _import_layers(proxy) # TODO: event sources SVC = RegionalService('lambda', _import_lambda_region)
_log = logging.getLogger(__name__)


def _import_certificate(proxy: ServiceProxy, summary: Dict) -> Dict[str, Any]:
  """Fetch the full certificate description plus its tags."""
  arn = summary['CertificateArn']
  certificate = proxy.get('describe_certificate',
                          CertificateArn=arn)['Certificate']
  tags_resp = proxy.list('list_tags_for_certificate', CertificateArn=arn)
  if tags_resp is not None:
    certificate['Tags'] = tags_resp[1]['Tags']
  return certificate


def _import_certificates(proxy: ServiceProxy):
  """Yield ('Certificate', data) for every ACM certificate in the region."""
  certificates_resp = proxy.list('list_certificates')
  if certificates_resp is not None:
    certificates = certificates_resp[1]['CertificateSummaryList']
    for certificate in certificates:
      yield 'Certificate', _import_certificate(proxy, certificate)


def _import_acm_region(
    proxy: ServiceProxy, region: str,
    spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]:
  """Import the gated ACM resource types for a single region."""
  _log.info(f'importing Certificates {region}')
  if resource_gate(spec, 'Certificate'):
    yield from _import_certificates(proxy)


SVC = RegionalService('acm', _import_acm_region)
if trail is not None: # When you create a trail in the console you create a single trail. It can be multiregional # which means it runs in all regions. The console still shows this as one however the api will # return an object with the same ARN in every region. This is to squash that down to one. if (trail['IsMultiRegionTrail'] is False) or (trail['IsMultiRegionTrail'] and trail['HomeRegion'] == region): try: yield 'Trail', _import_trail(proxy, trail) except botocore.exceptions.ClientError as e: code = e.response.get('Error', {}).get('Code') # Some trails are visible to sub-accounts, but can't actually # be accessed. Ignore them, the master account will pick them up if code != 'TrailNotFoundException': raise else: _log.info( f'Skipping trail {trail["Name"]}, not found in this account' ) def _import_cloudtrail_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: _log.info(f'importing describe_trails in {region}') if resource_gate(spec, 'Trail'): yield from _import_trails(proxy, region) SVC = RegionalService('cloudtrail', _import_cloudtrail_region)
def _import_domain(proxy: ServiceProxy, domain_name: str) -> Dict[str, Any]:
  """Fetch the full Elasticsearch domain, its access policy, and tags.

  Raises GFNoAccess when the describe call is not permitted.
  """
  domain_resp = proxy.get('describe_elasticsearch_domain',
                          DomainName=domain_name)
  if domain_resp is None:
    raise GFNoAccess('es', 'describe_elasticsearch_domain')
  domain = domain_resp['DomainStatus']
  # Default of 'null' makes json.loads return None when no policy is set.
  domain['AccessPolicies'] = json.loads(domain.get('AccessPolicies', 'null'))
  tags_resp = proxy.get('list_tags', ARN=domain['ARN'])
  if tags_resp is not None:
    domain['Tags'] = tags_resp['TagList']
  return domain


def _import_domains(proxy: ServiceProxy, region: str):
  """Yield ('Domain', data) for every Elasticsearch domain in the region."""
  domain_names_resp = proxy.list('list_domain_names')
  if domain_names_resp is not None:
    domain_infos = domain_names_resp[1].get('DomainNames', [])
    for domain_info in domain_infos:
      yield 'Domain', _import_domain(proxy, domain_info['DomainName'])


def _import_es_region(proxy: ServiceProxy, region: str,
                      spec: ServiceSpec) -> Iterator[Tuple[str, Any]]:
  """Import the gated Elasticsearch resource types for a single region."""
  if resource_gate(spec, 'Domain'):
    _log.info(f'importing es domains in {region}')
    yield from _import_domains(proxy, region)


SVC = RegionalService('es', _import_es_region)
lb[replaced] = transformed tags_resp = proxy.get('describe_tags', ResourceArns=[arn]) tag_descs = tags_resp.get('TagDescriptions', []) if len(tag_descs) == 1: lb['Tags'] = tag_descs[0]['Tags'] return lb def _import_loadbalancers(proxy: ServiceProxy, spec: ServiceSpec): lbs_resp = proxy.list('describe_load_balancers') if lbs_resp is not None: lbs = lbs_resp[1].get('LoadBalancers', []) for lb in lbs: yield 'LoadBalancer', _import_loadbalancer(proxy, lb) if resource_gate(spec, 'Listener'): yield from _import_listeners(proxy, lb['LoadBalancerArn']) def _import_elbv2_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: _log.info(f'importing elbv2 {region}') if resource_gate(spec, 'LoadBalancer'): yield from _import_loadbalancers(proxy, spec) if resource_gate(spec, 'TargetGroup'): yield from _import_target_groups(proxy) SVC = RegionalService('elbv2', _import_elbv2_region)
#'models' # Disabled, needs more permissions ] def _import_api(proxy: ServiceProxy, api: Dict) -> Dict[str, Any]: api_id = api['ApiId'] for resource in ApiResources: resp = proxy.list('get_' + resource, ApiId=api_id) if resp is not None: api[resource.capitalize()] = resp[1]['Items'] return api def _import_apis(proxy: ServiceProxy, region: str): apis_resp = proxy.list('get_apis') if apis_resp is not None: apis = apis_resp[1]['Items'] for api in apis: yield 'Api', _import_api(proxy, api) def _import_apigatewayv2_region( proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]: _log.info(f'importing Apis') if resource_gate(spec, 'Api'): yield from _import_apis(proxy, region) SVC = RegionalService('apigatewayv2', _import_apigatewayv2_region)
_log = logging.getLogger(__name__)


def _import_launch_configurations(proxy: ServiceProxy):
  """Yield ('LaunchConfiguration', data) for every launch configuration."""
  configs_resp = proxy.list('describe_launch_configurations')
  if configs_resp is not None:
    for config in configs_resp[1]['LaunchConfigurations']:
      yield 'LaunchConfiguration', config


def _import_autoscaling_groups(proxy: ServiceProxy):
  """Yield ('AutoScalingGroup', data) for every auto scaling group."""
  groups_resp = proxy.list('describe_auto_scaling_groups')
  if groups_resp is not None:
    for group in groups_resp[1]['AutoScalingGroups']:
      yield 'AutoScalingGroup', group


def _import_autoscaling_region(proxy: ServiceProxy, region: str,
                               spec: ServiceSpec) -> Iterator[Tuple[str, Any]]:
  """Import the gated autoscaling resource types for a single region."""
  _log.info(f'import autoscaling in {region}')
  if resource_gate(spec, 'AutoScalingGroup'):
    yield from _import_autoscaling_groups(proxy)
  if resource_gate(spec, 'LaunchConfiguration'):
    yield from _import_launch_configurations(proxy)


SVC = RegionalService('autoscaling', _import_autoscaling_region)
from introspector.aws.svc import RegionalService, ServiceSpec, resource_gate

_log = logging.getLogger(__name__)


def _import_cluster(proxy: ServiceProxy, cluster: Dict) -> Dict[str, Any]:
  """Attach the logging status to *cluster*."""
  cluster_id = cluster['ClusterIdentifier']
  logging_status = proxy.get('describe_logging_status',
                             ClusterIdentifier=cluster_id)
  cluster['LoggingStatus'] = logging_status
  return cluster


def _import_clusters(proxy: ServiceProxy, region: str):
  """Yield ('Cluster', data) for every Redshift cluster in the region."""
  clusters_resp = proxy.list('describe_clusters')
  if clusters_resp is not None:
    for cluster in clusters_resp[1]['Clusters']:
      yield 'Cluster', _import_cluster(proxy, cluster)


def _import_redshift_region(
    proxy: ServiceProxy, region: str,
    spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]:
  """Import the gated Redshift resource types for a single region."""
  if resource_gate(spec, 'Cluster'):
    _log.info(f'importing Redshift Clusters in {region}')
    yield from _import_clusters(proxy, region)


SVC = RegionalService('redshift', _import_redshift_region)
def _import_file_system(proxy: ServiceProxy, file_system: Dict[str, Any]):
  """Attach the file system policy (when one exists) to *file_system*."""
  file_system_id = file_system['FileSystemId']
  try:
    policy_resp = proxy.get('describe_file_system_policy',
                            FileSystemId=file_system_id)
    if policy_resp is not None:
      file_system['Policy'] = json.loads(policy_resp['Policy'])
  except ClientError as e:
    code = e.response.get('Error', {}).get('Code')
    # BUG FIX: the original condition
    #   code != 'AccessDeniedException' or 'PolicyNotFound'
    # was always truthy (the string literal short-circuits to True), so
    # every ClientError re-raised. Tolerate access-denied and the benign
    # "no policy attached" error; re-raise anything else.
    if code not in ('AccessDeniedException', 'PolicyNotFound'):
      raise
  return file_system


def _import_filesystems(proxy: ServiceProxy) -> Iterator[Tuple[str, Any]]:
  """Yield ('FileSystem', data) for every EFS file system in the region."""
  file_systems_resp = proxy.list('describe_file_systems')
  if file_systems_resp is not None:
    for file_system in file_systems_resp[1]['FileSystems']:
      yield 'FileSystem', _import_file_system(proxy, file_system)


def _import_efs_region(proxy: ServiceProxy, region: str,
                       spec: ServiceSpec) -> Iterator[Tuple[str, Any]]:
  """Import the gated EFS resource types for a single region."""
  _log.info(f'import efs in {region}')
  if resource_gate(spec, 'FileSystem'):
    yield from _import_filesystems(proxy)


SVC = RegionalService('efs', _import_efs_region)
import logging
from typing import Any, Iterator, Tuple

from introspector.aws.fetch import ServiceProxy
from introspector.aws.svc import RegionalService, ServiceSpec, resource_gate

_log = logging.getLogger(__name__)


def _import_cloudformation_stacks(proxy: ServiceProxy):
  """Yield ('Stack', data) for every CloudFormation stack in the region."""
  stacks_resp = proxy.list('describe_stacks')
  if stacks_resp is not None:
    for stack in stacks_resp[1]['Stacks']:
      yield 'Stack', stack


def _import_cloudformation_region(
    proxy: ServiceProxy, region: str,
    spec: ServiceSpec) -> Iterator[Tuple[str, Any]]:
  """Import the gated CloudFormation resource types for a single region."""
  _log.info(f'import cloudformation in {region}')
  if resource_gate(spec, 'Stack'):
    yield from _import_cloudformation_stacks(proxy)


SVC = RegionalService('cloudformation', _import_cloudformation_region)
def add_logs_resource_policies(db: Session, proxy: Proxy,
                               region_cache: RegionCache,
                               writer: ImportWriter, import_job: ImportJob,
                               ps: PathStack, account_id: str):
  """Synthesize per-log-group resource policies for every logs region.

  CloudWatch Logs resource policies apply by log-group name prefix; this
  expands each prefix policy onto the concrete log groups it matches and
  writes one synthesized 'ResourcePolicy' row per log group URI.
  """
  for region in region_cache.regions_for_service('logs'):
    logs_proxy = proxy.service('logs', region)
    policies = _import_resource_policies(logs_proxy)
    # Collect every statement that applies to each matching log group.
    synthesized = defaultdict(lambda: [])
    for prefix, statements in policies.items():
      for log_group_uri in _log_group_uris_by_prefix(
          db, import_job.provider_account_id, account_id, region, prefix):
        synthesized[log_group_uri] += statements
    for uri, statements in synthesized.items():
      policy = _make_policy(statements)
      writer(ps, 'ResourcePolicy', {
          'Policy': policy,
          'arn': uri
      }, {'region': region})


def _import_logs_region(proxy: ServiceProxy, region: str,
                        spec: ServiceSpec) -> Iterator[Tuple[str, Any]]:
  """Import the gated CloudWatch Logs resource types for a single region."""
  if resource_gate(spec, 'LogGroup'):
    _log.info(f'Import LogGroups in {region}')
    yield from _import_log_groups(proxy)


SVC = RegionalService('logs', _import_logs_region)
def _import_db_clusters(
    proxy: ServiceProxy) -> Iterator[Tuple[str, List[Dict[str, Any]]]]:
  """Yield the full DBClusters listing for the region as one item."""
  clusters_resp = proxy.list('describe_db_clusters')
  if clusters_resp is not None:
    yield 'DBClusters', clusters_resp[1].get('DBClusters', [])


def _import_db_instances(
    proxy: ServiceProxy) -> Iterator[Tuple[str, List[Dict[str, Any]]]]:
  """Yield the full DBInstances listing for the region as one item."""
  instances_resp = proxy.list('describe_db_instances')
  if instances_resp is not None:
    yield 'DBInstances', instances_resp[1].get('DBInstances', [])


def _import_rds_region(
    proxy: ServiceProxy, region: str,
    spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]:
  """Import the gated RDS resource types for a single region."""
  if resource_gate(spec, 'DBCluster'):
    yield from _import_db_clusters(proxy)
  if resource_gate(spec, 'DBInstance'):
    yield from _import_db_instances(proxy)
  if resource_gate(spec, 'DBSnapshot'):
    yield from _import_db_snapshots(proxy)
  if resource_gate(spec, 'DBClusterSnapshot'):
    yield from _import_db_cluster_snapshots(proxy)


SVC = RegionalService('rds', _import_rds_region)
try: rotation_status = proxy.get('get_key_rotation_status', KeyId=key_id) if rotation_status is not None: key_data['KeyRotationEnabled'] = rotation_status[ 'KeyRotationEnabled'] except ClientError as e: code = e.response.get('Error', {}).get('Code') if code == 'AccessDeniedException' and key_data['KeyManager'] == 'AWS': pass else: raise return key_data def _import_keys(proxy: ServiceProxy) -> Iterator[Tuple[str, Any]]: keys_resp = proxy.list('list_keys') if keys_resp is not None: key_list = keys_resp[1]['Keys'] for key_spec in key_list: yield 'Key', _import_key(proxy, key_spec['KeyId']) def _import_kms_region(proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Iterator[Tuple[str, Any]]: _log.info(f'import kms in {region}') if resource_gate(spec, 'Key'): yield from _import_keys(proxy) SVC = RegionalService('kms', _import_kms_region)
certificate_authority: Dict[str, Any]): arn = certificate_authority['Arn'] policy_resp = proxy.get('get_policy', ResourceArn=arn) if policy_resp is not None: certificate_authority['Policy'] = json.loads(policy_resp['Policy']) tags_resp = proxy.get('list_tags', CertificateAuthorityArn=arn) if tags_resp is not None: certificate_authority['Tags'] = tags_resp['Tags'] return certificate_authority def _import_certificate_authorities( proxy: ServiceProxy) -> Iterator[Tuple[str, Any]]: certificate_authorities_resp = proxy.list('list_certificate_authorities') if certificate_authorities_resp is not None: certificate_authorities = certificate_authorities_resp[1].get( 'CertificateAuthorities', []) for certificate_authority in certificate_authorities: yield 'CertificateAuthority', _import_certificate_authority( proxy, certificate_authority) def _import_acm_pca_region(proxy: ServiceProxy, region: str, spec: ServiceSpec) -> Iterator[Tuple[str, Any]]: _log.info(f'import acm-pca in {region}') if resource_gate(spec, 'CertificateAuthority'): yield from _import_certificate_authorities(proxy) SVC = RegionalService('acm-pca', _import_acm_pca_region)
def _import_parameter(proxy: ServiceProxy, parameter_data: Dict):
  """Return the SSM parameter as-is; tag fetching is pending permissions."""
  # TODO: bump permissions to get this value
  # resource_id = parameter_data['Name']
  # tags_result = proxy.get('list_tags_for_resource',
  #                         ResourceId=resource_id,
  #                         ResourceType='Parameter')
  # if tags_result is not None:
  #   tag_list = tags_result.get('TagList', [])
  #   parameter_data['Tags'] = tag_list
  return parameter_data


def _import_parameters(proxy: ServiceProxy, region: str):
  """Yield ('Parameter', data) for every SSM parameter in the region."""
  parameters_resp = proxy.list('describe_parameters')
  if parameters_resp is not None:
    parameters = parameters_resp[1]['Parameters']
    if parameters is not None:
      for parameter in parameters:
        yield 'Parameter', _import_parameter(proxy, parameter)


def _import_ssm_region(
    proxy: ServiceProxy, region: str,
    spec: ServiceSpec) -> Generator[Tuple[str, Any], None, None]:
  """Import the gated SSM resource types for a single region."""
  if resource_gate(spec, 'Parameter'):
    _log.info(f'importing describe_parameters {region}')
    yield from _import_parameters(proxy, region)


SVC = RegionalService('ssm', _import_ssm_region)
def _import_recorder(proxy: ServiceProxy, recorder_data: Dict):
  """Merge the recorder's current status into *recorder_data*."""
  name = recorder_data['name']
  recorder_status_resp = proxy.list('describe_configuration_recorder_status',
                                    ConfigurationRecorderNames=[name])
  if recorder_status_resp is not None:
    statuses = recorder_status_resp[1]['ConfigurationRecordersStatus']
    if len(statuses) == 1:
      recorder_data.update(statuses[0])
    elif len(statuses) > 1:
      # Logger.warn is a deprecated alias for Logger.warning.
      _log.warning(
          f'Received multiple statuses for configuration recorder {name}')
  return recorder_data


def _import_recorders(proxy: ServiceProxy):
  """Yield ('ConfigurationRecorder', data) for every config recorder."""
  recorders_resp = proxy.list('describe_configuration_recorders')
  if recorders_resp is not None:
    for recorder in recorders_resp[1]['ConfigurationRecorders']:
      yield 'ConfigurationRecorder', _import_recorder(proxy, recorder)


def _import_config_region(proxy: ServiceProxy, region: str,
                          spec: ServiceSpec) -> Iterator[Tuple[str, Any]]:
  """Import the gated AWS Config resource types for a single region."""
  _log.info(f'import config recorders in {region}')
  if resource_gate(spec, 'ConfigurationRecorder'):
    yield from _import_recorders(proxy)


SVC = RegionalService('config', _import_config_region)