def main(bucket_name, account_number, role_name):
    """
    Retrieves the accounts list from the specified S3 bucket.

    Parameters
    ----------
    bucket_name : string
        The S3 bucket name where the accounts list is stored.
    account_number : int
        The account number where the bucket lives.
    role_name : string
        The name of the role in the account to assume.

    Returns
    -------
    dict
        A dictionary of the accounts.
    """
    # Assume the role and open a session with the temporary credentials.
    credentials = assume_role.main(account_number, role_name)
    session = boto3.Session(
        aws_access_key_id=credentials['AccessKeyId'],
        aws_secret_access_key=credentials['SecretAccessKey'],
        aws_session_token=credentials['SessionToken'],
    )
    # Fetch accounts.json from the bucket and parse it into a dict.
    raw_body = (
        session.resource('s3')
        .Object(bucket_name, 'accounts.json')
        .get()['Body']
        .read()
    )
    return json.loads(raw_body.decode('utf-8'))
def lambda_handler(event, context):
    """
    Builds a dict of all AWS accounts listed under the organisation
    (optionally enriched with account aliases) and saves it to a specified
    S3 bucket.

    NOTE: It's easier to have the same role name throughout all accounts.

    Parameters
    ----------
    event : object
        The event invoking the lambda. Should be CloudWatch.
    context : object
        Information about invocation, function and execution environment.
    """
    # BUG FIX: the original referenced an undefined name `e` here.
    print(f'Incoming Event: {event}')
    accounts_list = dict()
    try:
        print('Processing organisation account.')
        # Assume a role in the organisation (management) account.
        temp_keys = assume_role.main(os.environ['org_account'],
                                     os.environ['role'])
        local_session = boto3.Session(
            aws_access_key_id=temp_keys['AccessKeyId'],
            aws_secret_access_key=temp_keys['SecretAccessKey'],
            aws_session_token=temp_keys['SessionToken'],
        )
        organisations_session = local_session.client('organizations')
        paginator = organisations_session.get_paginator('list_accounts')
        # BUG FIX: each page from the list_accounts paginator is a dict whose
        # 'Accounts' key holds the account records. The original iterated the
        # page dict itself and referenced an undefined name `organization`.
        for page in paginator.paginate():
            for account in page['Accounts']:
                accounts_list[account['Id']] = {
                    "ID": account['Id'],
                    "Name": account['Name'],
                    "Email": account['Email'],
                    "Alias": ""
                }
        # NOTE(review): env vars are strings, so any non-empty value
        # (including "false") enables this branch — confirm intent.
        if os.environ['get_aliases']:
            # BUG FIX: the original passed an undefined name `file_contents`.
            accounts_list = get_account_aliases(accounts_list)
        save_output.main(accounts_list, 'accounts.json', os.environ['bucket'],
                         os.environ['account'], os.environ['role'])
    except Exception as e:
        print(f'Exception in the accounts utility: {e}')
def get_account_aliases(accounts_list):
    """
    Assume roles in all accounts listed under the organisation and gather
    their account aliases.

    Parameters
    ----------
    accounts_list : dict
        A dict containing all accounts listed under the organisation,
        keyed by account ID.

    Returns
    -------
    dict
        An updated accounts_list dict containing account aliases.
    """
    for account in accounts_list:
        print(f'Processing account: {account}')
        # Assume a role in the account.
        temp_keys = assume_role.main(account, os.environ['role'])
        local_session = boto3.Session(
            aws_access_key_id=temp_keys['AccessKeyId'],
            aws_secret_access_key=temp_keys['SecretAccessKey'],
            aws_session_token=temp_keys['SessionToken'],
        )
        iam = local_session.client('iam')
        aliases = iam.list_account_aliases()['AccountAliases']
        # ROBUSTNESS FIX: an account may have no alias, in which case
        # 'AccountAliases' is an empty list and the original raised
        # IndexError. Leave the pre-filled "" alias in place instead.
        if aliases:
            accounts_list[account]['Alias'] = aliases[0]
    return accounts_list
def lambda_handler(event, context):
    """
    Runs through a list of AWS accounts stored in a csv file (created via
    accounts.py) and generates a dict of security groups, their inbound
    rules and network ACL associations per account and saves it in a
    specified S3 bucket.

    NOTE: It's easier to have the same role name throughout all accounts.

    Parameters
    ----------
    event : object
        The event invoking the lambda. Should be CloudWatch.
    context : object
        Information about invocation, function and execution environment.
    """
    # BUG FIX: the original referenced an undefined name `e` here.
    print(f'Incoming Event: {event}')
    # BUG FIX: `report` was passed to create_dict and indexed below but was
    # never initialised in this function.
    report = dict()
    try:
        accounts_response = get_account_list.main(os.environ['bucket'],
                                                  os.environ['account'],
                                                  os.environ['role'])
        if not accounts_response:
            return "No accounts list to use"
        for account in accounts_response:
            print(f'Processing account: {account}')
            # Assume a role in the account.
            temp_keys = assume_role.main(account, os.environ['role'])
            local_session = boto3.Session(
                aws_access_key_id=temp_keys['AccessKeyId'],
                aws_secret_access_key=temp_keys['SecretAccessKey'],
                aws_session_token=temp_keys['SessionToken'],
            )
            ec2 = local_session.client('ec2')
            # Paginate over the result from the EC2 describe.
            paginator = ec2.get_paginator('describe_security_groups')
            for page in paginator.paginate():
                for group in page['SecurityGroups']:
                    security_group_id = group['GroupId']
                    security_group_name = group['GroupName']
                    for permission in group['IpPermissions']:
                        ip_permissions = dict()
                        if 'FromPort' in permission:
                            from_port = permission['FromPort']
                            to_port = permission['ToPort']
                            if from_port == to_port:
                                ports = str(from_port)
                            else:
                                ports = str(from_port) + " - " + str(to_port)
                        else:
                            # ROBUSTNESS FIX: rules without a port range
                            # (e.g. IpProtocol '-1', all traffic) carry no
                            # 'FromPort'; report them as "All" so `ports`
                            # is never referenced while undefined.
                            ports = "All"
                        protocol = permission['IpProtocol']
                        ip_ranges = [ip['CidrIp']
                                     for ip in permission['IpRanges']]
                        ip_permissions.update({
                            "Ports": ports,
                            "Protocol": protocol.upper(),
                            "IPRanges": ip_ranges
                        })
                        create_dict(report, security_group_id,
                                    security_group_name, account,
                                    ip_permissions)
            # Record network ACL associations for the account.
            # NOTE(review): assumes create_dict has initialised
            # report[account]['NetworkACLs'] — confirm against create_dict.
            nacls = ec2.describe_network_acls()
            for nacl in nacls['NetworkAcls']:
                for association in nacl['Associations']:
                    report[account]['NetworkACLs'].append([association])
        # Save the accumulated report once, after all accounts are processed.
        save_output.main(report, os.environ['security_groups'],
                         os.environ['bucket'], os.environ['account'],
                         os.environ['role'])
    except Exception as e:
        print(f'Exception in security-groups utility: {e}')
def lambda_handler(event, context):
    """
    Runs through a list of AWS accounts stored in a csv file (created via
    accounts.py) and generates a credential report per account and saves
    them in a specified S3 bucket.

    NOTE: It's easier to have the same role name throughout all accounts.

    Parameters
    ----------
    event : object
        The event invoking the lambda. Should be CloudWatch.
    context : object
        Information about invocation, function and execution environment.
    """
    import time  # local import: only needed for the polling back-off below

    # BUG FIX: the original referenced an undefined name `e` here.
    print(f'Incoming Event: {event}')
    try:
        accounts_response = get_account_list.main(os.environ['bucket'],
                                                  os.environ['account'],
                                                  os.environ['role'])
        if not accounts_response:
            return "No accounts list to use"
        for account in accounts_response:
            print(f'Processing account: {account}')
            # Assume a role in the account.
            temp_keys = assume_role.main(account, os.environ['role'])
            local_session = boto3.Session(
                aws_access_key_id=temp_keys['AccessKeyId'],
                aws_secret_access_key=temp_keys['SecretAccessKey'],
                aws_session_token=temp_keys['SessionToken'],
            )
            iam_session = local_session.client('iam')
            # Execute the AWS generate credential report tool.
            iam_session.generate_credential_report()
            # Generation is asynchronous and can take a while depending on
            # how many users there are per account; poll until it is ready.
            while True:
                try:
                    report = iam_session.get_credential_report()['Content']
                    break
                except ClientError:
                    # Report not ready yet — back off briefly instead of
                    # hammering the API (the original busy-waited).
                    time.sleep(1)
            file_name = f'Credentials-{account}.csv'
            save_output.main(report, file_name, os.environ['bucket'],
                             os.environ['account'], os.environ['role'])
    except Exception as e:
        print(f'Exception in the credentials utility: {e}')
def lambda_handler(event, context):
    """
    Runs through a list of AWS accounts stored in a csv file (created via
    accounts.py) and generates a dict of Elastic IPs, network interfaces
    and related data per account and saves it in a specified S3 bucket.

    NOTE: It's easier to have the same role name throughout all accounts.

    Parameters
    ----------
    event : object
        The event invoking the lambda. Should be CloudWatch.
    context : object
        Information about invocation, function and execution environment.
    """
    # BUG FIX: the original referenced an undefined name `e` here.
    print(f'Incoming Event: {event}')
    report = dict()
    try:
        accounts_response = get_account_list.main(os.environ['bucket'],
                                                  os.environ['account'],
                                                  os.environ['role'])
        if not accounts_response:
            return "No accounts list to use"
        for account in accounts_response:
            print(f'Processing account: {account}')
            # Assume a role in the account.
            temp_keys = assume_role.main(account, os.environ['role'])
            local_session = boto3.Session(
                aws_access_key_id=temp_keys['AccessKeyId'],
                aws_secret_access_key=temp_keys['SecretAccessKey'],
                aws_session_token=temp_keys['SessionToken'],
            )
            ec2_session = local_session.client('ec2')
            addresses = ec2_session.describe_addresses()
            # Gather the association and disassociation events from CloudTrail
            cloudtrail_session = local_session.client('cloudtrail')
            association_events = cloudtrail_session.lookup_events(
                LookupAttributes=[{
                    'AttributeKey': 'EventName',
                    'AttributeValue': 'AssociateAddress'
                }])
            disassociation_events = cloudtrail_session.lookup_events(
                LookupAttributes=[{
                    'AttributeKey': 'EventName',
                    'AttributeValue': 'DisassociateAddress'
                }])
            events = []
            if 'Events' in association_events:
                events = list(association_events['Events'])
            if 'Events' in disassociation_events:
                events.extend(disassociation_events['Events'])
            ips = dict()
            for address in addresses['Addresses']:
                external_ip = address['PublicIp']
                if external_ip not in ips:
                    ips[external_ip] = {}
                if 'AllocationId' not in address:
                    continue
                allocation_id = address['AllocationId']
                # Match CloudTrail events against this EIP's allocation ID.
                for trail_event in events:
                    if 'EventTime' not in trail_event:
                        continue
                    event_time = trail_event['EventTime']
                    event_name = trail_event['EventName']
                    if event_name == 'DisassociateAddress':
                        # NOTE(review): this replaces any existing entry for
                        # the IP rather than merging — mirrors the original;
                        # confirm that is the intent.
                        ips[external_ip] = {
                            event_name: {
                                "Time": event_time.strftime(
                                    "%H:%M:%S : %d-%m-%y")
                            }
                        }
                    elif event_name == 'AssociateAddress':
                        for resource in trail_event['Resources']:
                            if resource['ResourceType'] != 'AWS::EC2::EIP':
                                continue
                            if allocation_id not in resource['ResourceName']:
                                continue
                            # Event concerns this EIP: record the EC2
                            # instance it was associated with.
                            for related in trail_event['Resources']:
                                if 'AWS::EC2::Instance' not in related[
                                        'ResourceType']:
                                    continue
                                entry = {
                                    event_name: {
                                        "Time": event_time.strftime(
                                            "%H:%M:%S : %d-%m-%y"),
                                        "EC2 Instance": related[
                                            'ResourceName']
                                    }
                                }
                                if ips.get(external_ip):
                                    ips[external_ip].update(entry)
                                else:
                                    ips[external_ip] = entry
            report.update({account: {"IPS": ips}})
            session_elb = local_session.client('elbv2')
            elb = session_elb.describe_load_balancers()
            # BUG FIX: the original called `ec2.describe_network_interfaces()`
            # but no `ec2` name exists in this function — the client created
            # above is `ec2_session`.
            interfaces = ec2_session.describe_network_interfaces()
            # BUG FIX: the original loop variable shadowed `interfaces`
            # (and, further in, `addresses`).
            for interface in interfaces['NetworkInterfaces']:
                if 'Groups' not in interface:
                    continue
                for group in interface['Groups']:
                    sg_group = group['GroupId']
                    # Hoisted: the original called get_security_groups once
                    # per check and again per private address.
                    load_balancer = get_security_groups(elb, sg_group,
                                                        interface)
                    if not load_balancer:
                        continue
                    for private_address in interface['PrivateIpAddresses']:
                        association = private_address.get('Association', {})
                        if 'PublicIp' in association:
                            external_ip = association['PublicIp']
                            ips[external_ip] = {
                                "LoadBalancer": load_balancer
                            }
        # Save the accumulated report once, after all accounts are processed.
        save_output.main(report, os.environ['elastic_ip'],
                         os.environ['bucket'], os.environ['account'],
                         os.environ['role'])
    except Exception as e:
        print(f'Exception in the elastic-ip utility: {e}')