def _import_repository(proxy: ServiceProxy, repository: Dict) -> Dict[str, Any]:
  """Attach tags and the repository policy to an ECR repository record.

  Tags unreadable due to AccessDenied and a missing repository policy are
  both recorded as an explicit None rather than raised.
  """

  def _error_code(err) -> str:
    return err.response.get('Error', {}).get('Code')

  repo_arn = repository['repositoryArn']
  try:
    tags_result = proxy.list('list_tags_for_resource', resourceArn=repo_arn)
    if tags_result is not None:
      repository['Tags'] = tags_result[1]['tags']
  except botocore.exceptions.ClientError as err:
    if _error_code(err) != 'AccessDeniedException':
      raise
    repository['Tags'] = None
  try:
    policy_result = proxy.get(
        'get_repository_policy',
        repositoryName=repository['repositoryName'],
        registryId=repository['registryId'])
    if policy_result is not None:
      repository['Policy'] = json.loads(policy_result['policyText'])
  except botocore.exceptions.ClientError as err:
    if _error_code(err) != 'RepositoryPolicyNotFoundException':
      raise
    repository['Policy'] = None
  return repository
def _import_hosted_zone(proxy: ServiceProxy, hosted_zone: Dict):
  """Augment a Route53 hosted-zone record in place.

  Strips the '/hostedzone/' prefix from the id, then attaches query logging
  configs, authorized VPC associations, traffic policy instances, resource
  record sets, and tags.
  """
  zone_id = hosted_zone['Id']
  prefix = '/hostedzone/'
  if zone_id.startswith(prefix):
    zone_id = zone_id[len(prefix):]
  hosted_zone['Id'] = zone_id
  # Each listing keys the response payload by the same name we store it
  # under, so drive them from a table.
  listings = (
      ('list_query_logging_configs', 'QueryLoggingConfigs'),
      ('list_vpc_association_authorizations', 'VPCs'),
      ('list_traffic_policy_instances_by_hosted_zone',
       'TrafficPolicyInstances'),
      ('list_resource_record_sets', 'ResourceRecordSets'),
  )
  for op, key in listings:
    resp = proxy.list(op, HostedZoneId=zone_id)
    if resp is not None:
      hosted_zone[key] = resp[1][key]
  tags_resp = proxy.list('list_tags_for_resource',
                         ResourceType='hostedzone',
                         ResourceId=zone_id)
  if tags_resp is not None:
    hosted_zone['Tags'] = tags_resp[1]['ResourceTagSet']['Tags']
  return hosted_zone
def _import_key(proxy: ServiceProxy, key_id: str):
  """Describe a KMS key and attach its tags, key policy and rotation state.

  AWS-managed keys commonly deny list_resource_tags and
  get_key_rotation_status; those AccessDenied errors are tolerated
  (tags default to [], rotation state is simply omitted).
  """
  key_data = proxy.get('describe_key', KeyId=key_id).get('KeyMetadata', {})
  try:
    tags_result = proxy.get('list_resource_tags', KeyId=key_id)
    if tags_result is not None:
      key_data['Tags'] = tags_result['Tags']
  except ClientError as err:
    denied = err.response.get('Error', {}).get('Code') == \
        'AccessDeniedException'
    if denied and key_data['KeyManager'] == 'AWS':
      key_data['Tags'] = []
    else:
      raise
  policy_result = proxy.get('get_key_policy', KeyId=key_id,
                            PolicyName='default')
  if policy_result is not None:
    key_data['Policy'] = json.loads(policy_result['Policy'])
  try:
    rotation = proxy.get('get_key_rotation_status', KeyId=key_id)
    if rotation is not None:
      key_data['KeyRotationEnabled'] = rotation['KeyRotationEnabled']
  except ClientError as err:
    denied = err.response.get('Error', {}).get('Code') == \
        'AccessDeniedException'
    if not (denied and key_data['KeyManager'] == 'AWS'):
      raise
  return key_data
def _import_certificate(proxy: ServiceProxy, summary: Dict) -> Dict[str, Any]:
  """Fetch the full ACM certificate for a list_certificates summary and
  attach its tags."""
  cert_arn = summary['CertificateArn']
  described = proxy.get('describe_certificate', CertificateArn=cert_arn)
  certificate = described['Certificate']
  tags_result = proxy.list('list_tags_for_certificate',
                           CertificateArn=cert_arn)
  if tags_result is not None:
    certificate['Tags'] = tags_result[1]['Tags']
  return certificate
def _import_log_group(proxy: ServiceProxy, group: Dict) -> Dict:
  """Attach tags and metric filters to a CloudWatch Logs log group record."""
  group_name = group['logGroupName']
  tags_listing = proxy.list('list_tags_log_group', logGroupName=group_name)
  if tags_listing is not None:
    group['Tags'] = tags_listing[1]['tags']
  filters_listing = proxy.list('describe_metric_filters',
                               logGroupName=group_name)
  if filters_listing is not None:
    group['MetricFilters'] = filters_listing[1]['metricFilters']
  return group
def _import_certificate_authority(proxy: ServiceProxy,
                                  certificate_authority: Dict[str, Any]):
  """Attach the resource policy (parsed from JSON) and tags to an ACM-PCA
  certificate authority record."""
  ca_arn = certificate_authority['Arn']
  policy_result = proxy.get('get_policy', ResourceArn=ca_arn)
  if policy_result is not None:
    certificate_authority['Policy'] = json.loads(policy_result['Policy'])
  tags_result = proxy.get('list_tags', CertificateAuthorityArn=ca_arn)
  if tags_result is not None:
    certificate_authority['Tags'] = tags_result['Tags']
  return certificate_authority
def _import_distribution(proxy: ServiceProxy, summary: Dict) -> Dict[str, Any]:
  """Fetch the full CloudFront distribution for a listing summary, merge in
  its config, and attach its tags."""
  dist_arn = summary['ARN']
  dist_id = summary['Id']
  distribution = proxy.get('get_distribution', Id=dist_id)['Distribution']
  config_resp = proxy.get('get_distribution_config', Id=dist_id)
  distribution.update(config_resp['DistributionConfig'])
  tags_listing = proxy.list('list_tags_for_resource', Resource=dist_arn)
  if tags_listing is not None:
    distribution['Tags'] = tags_listing[1]['Tags']['Items']
  return distribution
def _import_domain(proxy: ServiceProxy, domain_name: str) -> Dict[str, Any]:
  """Describe an Elasticsearch domain, parse its access policy and attach
  its tags.

  Raises:
    GFNoAccess: if describe_elasticsearch_domain is not permitted.
  """
  described = proxy.get('describe_elasticsearch_domain',
                        DomainName=domain_name)
  if described is None:
    raise GFNoAccess('es', 'describe_elasticsearch_domain')
  domain = described['DomainStatus']
  # 'null' default keeps json.loads happy when no policy is present.
  domain['AccessPolicies'] = json.loads(domain.get('AccessPolicies', 'null'))
  tags_result = proxy.get('list_tags', ARN=domain['ARN'])
  if tags_result is not None:
    domain['Tags'] = tags_result['TagList']
  return domain
def _import_roles(proxy: ServiceProxy, spec: ServiceSpec):
  """Yield ('role', data) for every IAM role, with the attributes listed in
  _ROLE_ATTRS and all inline policies attached."""
  if not resource_gate(spec, 'role'):
    return
  listing = _unpack(proxy.list('list_roles'))
  for summary in listing['Roles']:
    role_name = summary['RoleName']
    role_data = proxy.get('get_role', RoleName=role_name)['Role']
    for attr_key, list_op in _ROLE_ATTRS.items():
      unpacked = _unpack(proxy.list(list_op, RoleName=role_name))
      role_data[attr_key] = unpacked.get(attr_key)
    role_data['PolicyList'] = _fetch_inline_policies(proxy, 'role', role_name)
    yield 'role', role_data
def _import_groups(proxy: ServiceProxy, spec: ServiceSpec):
  """Yield ('group', data) for every IAM group, with the attributes listed
  in _GROUP_ATTRS and all inline policies attached.

  Each yielded dict is a copy of the listing entry; the listing response is
  not mutated.
  """
  if resource_gate(spec, 'group'):
    groups = _unpack(proxy.list('list_groups'))
    # Default to [] so a missing 'Groups' key yields nothing instead of
    # raising TypeError when iterating None (mirrors _import_roles).
    for group in groups.get('Groups', []):
      group_data = group.copy()
      name = group_data['GroupName']
      for attr, op in _GROUP_ATTRS.items():
        op_result = proxy.list(op, GroupName=name)
        if op_result is not None:
          group_data[attr] = op_result[1][attr]
      group_data['PolicyList'] = _fetch_inline_policies(proxy, 'group', name)
      yield 'group', group_data
def _import_trail(proxy: ServiceProxy, trail_data: Dict):
  """Merge a CloudTrail trail's status, tags and event selectors into its
  record."""
  trail_name = trail_data['Name']
  trail_arn = trail_data['TrailARN']
  trail_data.update(proxy.get('get_trail_status', Name=trail_name))
  tags_listing = proxy.list('list_tags', ResourceIdList=[trail_arn])
  if tags_listing is not None:
    resource_tags = tags_listing[1]['ResourceTagList']
    # list_tags returns one entry per requested ARN; we asked for one.
    if resource_tags:
      trail_data['Tags'] = resource_tags[0]['TagsList']
  selectors = proxy.get('get_event_selectors', TrailName=trail_name)
  trail_data['EventSelectors'] = selectors['EventSelectors']
  return trail_data
def _import_queue(proxy: ServiceProxy, queue_url: str) -> Dict[str, Any]:
  """Fetch all attributes and tags for an SQS queue.

  Raises:
    GFNoAccess: if get_queue_attributes is not permitted.
  """
  attrs_resp = proxy.get('get_queue_attributes',
                         QueueUrl=queue_url,
                         AttributeNames=['All'])
  if attrs_resp is None:
    raise GFNoAccess('sqs', 'get_queue_attributes')
  queue = attrs_resp['Attributes']
  queue['url'] = queue_url
  # 'null' default keeps json.loads happy when the queue has no policy.
  queue['Policy'] = json.loads(queue.get('Policy', 'null'))
  tags_result = proxy.get('list_queue_tags', QueueUrl=queue_url)
  if tags_result is not None:
    queue['Tags'] = tags_result['Tags']
  return queue
def _import_environment(proxy: ServiceProxy,
                        environment: Dict) -> Dict[str, Any]:
  """Attach tags and environment resources to an Elastic Beanstalk
  environment record (updated in place and returned)."""
  arn = environment['EnvironmentArn']
  tags_resp = proxy.list('list_tags_for_resource', ResourceArn=arn)
  if tags_resp is not None:
    environment['Tags'] = tags_resp[1]['ResourceTags']
  resources_resp = proxy.get(
      'describe_environment_resources',
      EnvironmentName=environment['EnvironmentName'])
  if resources_resp is not None:
    environment.update(resources_resp['EnvironmentResources'])
  else:
    # Logger.warn is a deprecated alias for Logger.warning.
    _log.warning(f'Failed to get environment resources for {arn}')
  return environment
def _import_db_snapshots(
    proxy: ServiceProxy) -> Iterator[Tuple[str, Dict[str, Any]]]:
  """Yield ('DBSnapshot', data) for every RDS DB snapshot, with each
  snapshot attribute flattened onto the snapshot dict."""
  snapshots_resp = proxy.list('describe_db_snapshots')
  if snapshots_resp is not None:
    snapshots = snapshots_resp[1].get('DBSnapshots', [])
    for snapshot in snapshots:
      snapshot_id = snapshot['DBSnapshotIdentifier']
      attrs_resp = proxy.get('describe_db_snapshot_attributes',
                             DBSnapshotIdentifier=snapshot_id)
      # proxy.get returns None on access failure (see other callers in
      # this module); skip attribute enrichment rather than crash.
      if attrs_resp is not None:
        attrs = attrs_resp['DBSnapshotAttributesResult'].get(
            'DBSnapshotAttributes', [])
        for attr in attrs:
          snapshot[attr['AttributeName']] = attr['AttributeValues']
      yield 'DBSnapshot', snapshot
def _import_functions(proxy: ServiceProxy, spec: ServiceSpec):
  """Yield ('Function', data) for every Lambda function, then any
  per-function sub-resources from _import_function.

  Tags are attached when HAS_TAGS declares a taggable ARN attribute for
  list_functions; the resource policy is always attached.
  """
  functions_resp = proxy.list('list_functions')
  if functions_resp is not None:
    # Loop-invariant lookup hoisted out of the per-function loop.
    arn_for_tags = HAS_TAGS.get('list_functions')
    for function in functions_resp[1]['Functions']:
      if arn_for_tags is not None:
        arn = function[arn_for_tags]
        tags_result = proxy.list('list_tags', Resource=arn)
        if tags_result is not None:
          function['Tags'] = tags_result[1].get('Tags', [])
      function['Policy'] = _get_policy(proxy, function['FunctionArn'])
      yield 'Function', function
      yield from _import_function(proxy, function, spec)
def _build_org_graph(proxy: ServiceProxy, account_id: str):
  """Build the organization graph for an account.

  Falls back to a dummy graph when Organizations is not in use, and to a
  sub-account graph when listing roots is not authorized.
  """
  try:
    org = proxy.get('describe_organization')['Organization']
  except botocore.exceptions.ClientError as err:
    code = err.response.get('Error', {}).get('Code')
    if code != 'AWSOrganizationsNotInUseException':
      raise
    return _build_dummy_org_graph(account_id)
  roots = _require_resp(proxy.list('list_roots'))
  if roots.get('introspector') == 'auth failure':
    # Sub-accounts cannot list roots; build the reduced graph instead.
    return _build_sub_account_graph(account_id, org)
  return _build_master_account_graph(proxy, org, roots)
def _import_vault(proxy: ServiceProxy, vault: Dict[str, Any]):
  """Attach the access policy (parsed from JSON) and tags to a Glacier
  vault record.

  accountId='-' addresses the account owning the credentials in use.
  """
  vault_name = vault['VaultName']
  policy_result = proxy.get('get_vault_access_policy',
                            vaultName=vault_name,
                            accountId='-')
  if policy_result is not None:
    policy_text = policy_result.get('policy', {}).get('Policy')
    if policy_text is not None:
      vault['Policy'] = json.loads(policy_text)
  tags_result = proxy.get('list_tags_for_vault',
                          vaultName=vault_name,
                          accountId='-')
  if tags_result is not None:
    vault['Tags'] = tags_result['Tags']
  return vault
def _import_target_group(proxy: ServiceProxy, target_group: Dict):
  """Attach attributes and tags to an ELBv2 target group record.

  describe_target_group_attributes returns a list of {'Key': ..., 'Value':
  ...} dicts; each key is flattened (dots -> underscores), its value
  coerced via _TG_ATTR_TYPES, and merged into the target group dict.
  """
  arn = target_group['TargetGroupArn']
  attributes = proxy.get('describe_target_group_attributes',
                         TargetGroupArn=arn)['Attributes']
  for attribute in attributes:
    # BUG FIX: each attribute is a dict, not a (key, value) pair —
    # `for key, value in attributes` unpacked the dict's keys, yielding
    # the literal strings 'Key'/'Value' and discarding all attributes.
    key = attribute['Key']
    value = attribute['Value']
    replaced = key.replace('.', '_')
    transform = _TG_ATTR_TYPES.get(replaced, str)
    target_group[replaced] = transform(value)
  tags_resp = proxy.get('describe_tags', ResourceArns=[arn])
  tag_descs = tags_resp.get('TagDescriptions', [])
  if len(tag_descs) == 1:
    target_group['Tags'] = tag_descs[0]['Tags']
  return target_group
def _import_loadbalancer(proxy: ServiceProxy, lb: Dict):
  """Attach attributes and tags to an ELBv2 load balancer record.

  describe_load_balancer_attributes returns a list of {'Key': ...,
  'Value': ...} dicts; each key is flattened (dots -> underscores), its
  value coerced via _LB_ATTR_TYPES, and merged into the load balancer dict.
  """
  arn = lb['LoadBalancerArn']
  attributes = proxy.get('describe_load_balancer_attributes',
                         LoadBalancerArn=arn)['Attributes']
  for attribute in attributes:
    # BUG FIX: each attribute is a dict, not a (key, value) pair —
    # `for key, value in attributes` unpacked the dict's keys, yielding
    # the literal strings 'Key'/'Value' and discarding all attributes.
    key = attribute['Key']
    value = attribute['Value']
    replaced = key.replace('.', '_')
    transform = _LB_ATTR_TYPES.get(replaced, str)
    lb[replaced] = transform(value)
  tags_resp = proxy.get('describe_tags', ResourceArns=[arn])
  tag_descs = tags_resp.get('TagDescriptions', [])
  if len(tag_descs) == 1:
    lb['Tags'] = tag_descs[0]['Tags']
  return lb
def _import_credential_report(proxy: ServiceProxy, spec: ServiceSpec):
  """Generate and fetch the IAM credential report, yielding one
  ('CredentialReport', row) tuple per processed CSV row.

  Two phases, each with bounded retries:
    1. generate_credential_report: up to 3 attempts (0.1s apart) against
       Throttling or SSL errors; any other ClientError is fatal.
    2. get_credential_report: up to 20 attempts (1s apart) while the
       report is still in progress.

  Raises:
    GFError: if generation or fetching exhausts its retry budget.
  """
  if resource_gate(spec, 'CredentialReport'):
    # Kick off the report
    started = False
    init_attempts = 0
    while not started:
      try:
        resp = proxy.get('generate_credential_report')
        # STARTED/COMPLETE both mean generation was accepted.
        started = resp.get('State') in ('STARTED', 'COMPLETE')
      except ClientError as e:
        code = e.response.get('Error', {}).get('Code')
        is_throttled = code == 'Throttling'
        if not is_throttled:
          # Non-throttling errors are not retryable.
          _log.error('credential report error', exc_info=e)
          raise
        # wait and try again?
        init_attempts += 1
        if init_attempts >= 3:
          _log.error('credential report error', exc_info=e)
          raise GFError('Failed to generate credential report')
        time.sleep(0.1)
      except SSLError:
        # wait and try again?
        init_attempts += 1
        if init_attempts >= 3:
          raise GFError('Failed to generate credential report, SSL Error')
        time.sleep(0.1)
    # Poll for the finished report.
    attempts = 0
    report = None
    while attempts < 20 and report is None:
      try:
        report = proxy.get('get_credential_report')
      except ClientError as e:
        code = e.response.get('Error', {}).get('Code')
        if code == 'ReportInProgress':
          # Still generating; wait and poll again.
          attempts += 1
          time.sleep(1)
        else:
          _log.error('credenetial report fetch error', exc_info=e)
          raise
    if report is None:
      raise GFError('Failed to fetch credential report')
    # The report body is UTF-8 CSV with a header row.
    decoded = report['Content'].decode('utf-8')
    reader = csv.DictReader(StringIO(decoded))
    for row in reader:
      processed = _post_process_report_row(row)
      yield 'CredentialReport', processed
def _import_application(proxy: ServiceProxy,
                        application: Dict) -> Dict[str, Any]:
  """Attach tags to an Elastic Beanstalk application record."""
  app_arn = application['ApplicationArn']
  tags_listing = proxy.list('list_tags_for_resource', ResourceArn=app_arn)
  if tags_listing is not None:
    application['Tags'] = tags_listing[1]['ResourceTags']
  return application
def _fetch_inline_policies(proxy: ServiceProxy, principal: str, name: str):
  """Fetch every inline policy attached to an IAM principal.

  principal is e.g. 'role' / 'group' / 'user'; the corresponding
  list_<principal>_policies / get_<principal>_policy operations are used.

  Raises:
    GFInternal: if a listed policy cannot be fetched.
  """
  kwargs = {f'{principal.capitalize()}Name': name}
  listing = _unpack(proxy.list(f'list_{principal}_policies', **kwargs))
  get_op = f'get_{principal}_policy'
  fetched = []
  for policy_name in listing.get('PolicyNames', []):
    resp = proxy.get(get_op, PolicyName=policy_name, **kwargs)
    if resp is None:
      raise GFInternal(
          f'Missing inline policy {policy_name} for {principal} {name}')
    fetched.append({
        'PolicyName': resp['PolicyName'],
        'PolicyDocument': resp['PolicyDocument']
    })
  return fetched
def _import_distributions(proxy: ServiceProxy):
  """Yield ('Distribution', data) for every CloudFront distribution."""
  listing = proxy.list('list_distributions')
  if listing is None:
    return
  summaries = listing[1].get('DistributionList', {}).get('Items', [])
  for summary in summaries:
    yield 'Distribution', _import_distribution(proxy, summary)
def _import_listeners(proxy: ServiceProxy, loadbalancer_arn: str):
  """Yield ('Listener', data) for each listener on an ELBv2 load balancer."""
  listing = proxy.list('describe_listeners',
                       LoadBalancerArn=loadbalancer_arn)
  if listing is None:
    return
  for listener in listing[1].get('Listeners', []):
    yield 'Listener', listener
def _import_trails(proxy: ServiceProxy, region: str):
  """Yield ('Trail', data) for each CloudTrail trail belonging to *region*.

  Multi-region trails are reported by the API under the same ARN in every
  region; only the copy in the trail's home region is emitted. Trails that
  are listed but not accessible (TrailNotFoundException, typical for
  trails a sub-account can see but not read) are skipped.
  """
  trails_resp = proxy.list('describe_trails')
  if trails_resp is not None:
    trails = trails_resp[1]['trailList']
    if trails is not None:
      for trail in trails:
        if trail is not None:
          # When you create a trail in the console you create a single trail. It can be multiregional
          # which means it runs in all regions. The console still shows this as one however the api will
          # return an object with the same ARN in every region. This is to squash that down to one.
          if (trail['IsMultiRegionTrail'] is False) or (
              trail['IsMultiRegionTrail'] and
              trail['HomeRegion'] == region):
            try:
              yield 'Trail', _import_trail(proxy, trail)
            except botocore.exceptions.ClientError as e:
              code = e.response.get('Error', {}).get('Code')
              # Some trails are visible to sub-accounts, but can't actually
              # be accessed. Ignore them, the master account will pick them up
              if code != 'TrailNotFoundException':
                raise
              else:
                _log.info(
                    f'Skipping trail {trail["Name"]}, not found in this account'
                )
def _import_topic(proxy: ServiceProxy, topic_data: Dict) -> Dict:
  """Attach tags and typed attributes to an SNS topic record.

  Attributes in _TOPIC_SKIP are dropped, _TOPIC_JSON ones are parsed as
  JSON, _TOPIC_INT ones coerced to int, and the rest kept verbatim.
  """
  topic_arn = topic_data['TopicArn']
  tags_listing = proxy.list('list_tags_for_resource', ResourceArn=topic_arn)
  if tags_listing is not None:
    topic_data['Tags'] = tags_listing[1]['Tags']
  attrs_resp = proxy.get('get_topic_attributes', TopicArn=topic_arn)
  for attr_name, raw_value in attrs_resp['Attributes'].items():
    if attr_name in _TOPIC_SKIP:
      continue
    if attr_name in _TOPIC_JSON:
      topic_data[attr_name] = json.loads(raw_value)
    elif attr_name in _TOPIC_INT:
      topic_data[attr_name] = int(raw_value)
    else:
      topic_data[attr_name] = raw_value
  return topic_data
def _import_route53(proxy: ServiceProxy,
                    spec: ServiceSpec) -> Iterator[Tuple[str, Any]]:
  """Yield ('HostedZone', zone) for every Route53 hosted zone, each
  enriched via _import_hosted_zone."""
  # NOTE(review): 'HostzedZone' looks like a typo for 'HostedZone'. It is a
  # runtime gate key, so specs/config elsewhere may rely on this exact
  # spelling — confirm before renaming.
  if resource_gate(spec, 'HostzedZone'):
    zones_resp = proxy.list('list_hosted_zones')
    if zones_resp is not None:
      for zone in zones_resp[1]['HostedZones']:
        yield 'HostedZone', _import_hosted_zone(proxy, zone)
def _import_task_definitions(proxy: ServiceProxy):
  """Yield ('TaskDefinition', data) for every ECS task definition."""
  listing = proxy.list('list_task_definitions')
  if listing is None:
    return
  for definition_arn in listing[1].get('taskDefinitionArns', []):
    yield 'TaskDefinition', _import_task_definition(proxy, definition_arn)
def _import_parameters(proxy: ServiceProxy, region: str):
  """Yield ('Parameter', data) for every SSM parameter.

  The *region* argument is currently unused but kept for signature
  compatibility with the other per-region importers.
  """
  listing = proxy.list('describe_parameters')
  if listing is None:
    return
  parameters = listing[1]['Parameters']
  if parameters is None:
    return
  for parameter in parameters:
    yield 'Parameter', _import_parameter(proxy, parameter)
def _import_api(proxy: ServiceProxy, api: Dict) -> Dict[str, Any]:
  """Attach each ApiResources listing (e.g. routes, stages) to an API
  Gateway v2 api record, keyed by the capitalized resource name."""
  api_id = api['ApiId']
  for resource_name in ApiResources:
    listing = proxy.list(f'get_{resource_name}', ApiId=api_id)
    if listing is not None:
      api[resource_name.capitalize()] = listing[1]['Items']
  return api