def main(args, pacu_main: 'Main'):
    """Download the default Lightsail SSH key pair for every Lightsail region.

    For each region, calls DownloadDefaultKeyPair and writes the private key
    to <downloads>/<region>/default and the public key to
    <downloads>/<region>/default.pub.

    Returns a summary dict with the download path and the list of regions for
    which a key pair was saved.
    """
    ###### Don't modify these. They can be removed if you are not using the function.
    session = pacu_main.get_active_session()
    args = parser.parse_args(args)
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    ######
    summary_data = {'region_key_pairs': []}
    regions = get_regions('lightsail')
    summary_data['dl_path'] = str(downloads_dir())
    for region in regions:
        print(' Downloading default keys for {}...'.format(region))
        cur_path = downloads_dir() / region
        # exist_ok avoids the check-then-create race of the original
        # `if not exists(): mkdir()`, and parents=True covers a missing
        # downloads root as well.
        cur_path.mkdir(parents=True, exist_ok=True)
        client = pacu_main.get_boto3_client('lightsail', region)
        downloaded_keys = client.download_default_key_pair()
        restructured_keys = {
            'publicKey': downloaded_keys['publicKeyBase64'],
            'privateKey': downloaded_keys['privateKeyBase64'],
        }
        private_path = cur_path / 'default'
        with private_path.open('w', encoding='utf-8') as key_file:
            key_file.write(restructured_keys['privateKey'])
        public_path = cur_path / 'default.pub'
        with public_path.open('w', encoding='utf-8') as key_file:
            key_file.write(restructured_keys['publicKey'])
        summary_data['region_key_pairs'].append(region)
    return summary_data
def get_function_source(session_name, func):
    """Download a Lambda function's code zip and return its contents.

    func is a Lambda function description (as returned by list_functions /
    get_function) and must contain 'FunctionArn' and 'Code.Location'.

    Returns a dict mapping each member file name inside the zip to its
    UTF-8 (lossily) decoded contents, or None if a required key is missing.
    """
    fname = '<unknown>'  # so the error path below never hits an unbound name
    try:
        # Get Link and setup file name
        fname = func['FunctionArn'].split(':')[-1]
        code_url = func['Code']['Location']
        # Download File from URL
        r = requests.get(code_url, stream=True)
        # Write Zip to output file
        fname = str(downloads_dir() / f'lambda_{fname}.zip')
        with open(fname, 'wb') as f:
            f.write(r.content)
        # Load Zip contents into memory, keyed by member file name.
        # The original keyed every entry on the builtin `id` (a constant
        # function object), which collapsed the whole archive onto one key.
        with zipfile.ZipFile(fname) as lambda_zip:
            return {
                name: lambda_zip.read(name).decode('utf-8', errors='ignore')
                for name in lambda_zip.namelist()
            }
    except KeyError:
        # The original passed fname positionally to a named placeholder,
        # which raised KeyError inside the error handler itself.
        print(Color.RED, 'Error getting {fname} Source'.format(fname=fname))
def main(args, pacu_main: 'Main'):
    """Download the CloudTrail event history for each target region.

    Events are fetched 50 at a time (following NextToken) and dumped to one
    JSON file per region in the session downloads directory.

    Returns {region: event_count}, or None if no usable region is available.
    """
    session = pacu_main.get_active_session()
    ###### Don't modify these. They can be removed if you are not using the function.
    args = parser.parse_args(args)
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    ######
    summary_data = {}
    if args.regions is None:
        regions = get_regions('cloudtrail')
        # Truthiness covers None and every empty-container case the original
        # compared against one by one.
        if not regions:
            print(
                'This module is not supported in any regions specified in the current sessions region set. Exiting...'
            )
            return
    else:
        regions = args.regions.split(',')
    for region in regions:
        events = []
        print('Downloading logs from {}:'.format(region))
        print(' This may take a while...')
        client = pacu_main.get_boto3_client('cloudtrail', region)
        event_history = client.lookup_events(MaxResults=50)
        events += event_history['Events']
        while 'NextToken' in event_history:
            print(' Processing additional results...')
            event_history = client.lookup_events(
                MaxResults=50, NextToken=event_history['NextToken'])
            events += event_history['Events']
        summary_data[region] = len(events)
        print('Finished enumerating {}'.format(region))
        now = time.time()
        filename = 'cloudtrail_{}_event_history_{}.json'.format(region, now)
        with save(filename) as f:
            json.dump(events, f, indent=2, default=str)
        # Reuse `filename` so the reported path can never drift from the
        # file name actually written.
        print(' Events written to {}/{}'.format(downloads_dir(), filename))
    return summary_data
def export_api_doc(client, session, api_summary, exportType='swagger'):
    """Export the documentation of every stage of one REST API.

    Calls apigateway GetExport for each stage in api_summary and writes the
    JSON body to <downloads>/apigateway/<api>_<stage>_swagger.json.

    Returns the list of file names written.
    """
    files_names = []
    output_path = downloads_dir() / 'apigateway'
    # The original assumed this folder existed; open() below fails otherwise.
    output_path.mkdir(parents=True, exist_ok=True)
    api_id = api_summary['id']
    api_name = api_summary['name']
    stages = api_summary['stages']
    for stage in stages:
        response = client.get_export(restApiId=api_id,
                                     stageName=stage,
                                     exportType=exportType)
        # NOTE(review): the file name says "swagger" even when a different
        # exportType (e.g. 'oas30') is requested — kept as-is so existing
        # consumers of these file names keep working.
        filename = f"{api_name}_{stage}_swagger.json"
        with open(output_path / filename, 'w') as f:
            data = json.loads(response['body'].read().decode("utf-8"))
            json.dump(data, f, indent=4)
        files_names.append(filename)
    return files_names
def write_keys_to_file(created_keys, session):
    """Persist harvested instance credentials to per-region folders.

    RDP credentials become a one-row CSV file (with a header line); SSH
    credentials become a private-key file plus a matching -cert.pub file.
    Write failures are reported and skipped, never fatal.
    """
    for region, credentials in created_keys.items():
        ssh_key_dir = os.path.join(downloads_dir(), module_info['name'], region)
        if not os.path.exists(ssh_key_dir):
            os.makedirs(ssh_key_dir)
        for credential in credentials:
            instance_name = credential['instanceName']
            if credential['protocol'] == 'rdp':
                csv_path = os.path.join(ssh_key_dir, instance_name)
                try:
                    with open(csv_path, 'w') as windows_file:
                        # Header row, then the single credential row.
                        windows_file.write(
                            'instanceName,ipAddress,username,password\n')
                        row = ','.join([
                            instance_name,
                            credential['ipAddress'],
                            credential['username'],
                            credential['password'],
                        ])
                        windows_file.write(row + '\n')
                except IOError:
                    print('Error writing credential file for {}.'.format(
                        instance_name))
                    continue
            else:
                key_path = os.path.join(ssh_key_dir, instance_name)
                cert_path = os.path.join(ssh_key_dir,
                                         instance_name + '-cert.pub')
                try:
                    with open(key_path, 'w') as private_key_file:
                        private_key_file.write(credential['privateKey'])
                    with open(cert_path, 'w') as cert_key_file:
                        cert_key_file.write(credential['certKey'])
                except IOError:
                    print('Error writing credential file for {}.'.format(
                        instance_name))
                    continue
def main(args, pacu: Main):
    """Main module function, called from Pacu.

    Downloads an EBS snapshot locally (prompting for one when --snapshot-id
    is not supplied) and prepares a Vagrant environment for exploring it.

    Returns a SummaryData on success, False on any failure.
    """
    print = pacu.print
    session = pacu.get_active_session()
    # Parse once — the original re-ran parser.parse_args for each option.
    parsed = parser.parse_args(args)
    snapshot_id = parsed.snapshot_id
    region = parsed.region
    if not snapshot_id:
        if not pacu.fetch_data(['EC2', 'Snapshots'],
                               'ebs__enum_volumes_snapshots', ''):
            print('Failed to fetch EBS snapshot data')
            return False
        try:
            s = snapshot_prompt(session.EC2['Snapshots'])
            snapshot_id = s['SnapshotId']
            region = s['Region']
        except UserWarning as e:
            print(*e.args)
            return False
    try:
        out_dir = downloads_dir() / 'ebs/snapshots'
        snap = snapshot.LocalSnapshot(str(out_dir), snapshot_id,
                                      pacu.get_boto_session(region=region),
                                      pacu.get_botocore_conf())
    except UserWarning as e:
        print(*e.args)
        return False
    snap.fetch()
    return SummaryData(
        out_dir=str(out_dir.relative_to('.')),
        snapshot_id=snapshot_id,
        snapshot_path=str(snap.path),
        vagrantfile=str(utils.init_vagrant(out_dir, True)),
    )
def main(args, pacu_main):
    """Enumerate and decrypt SSM Parameter Store values per region.

    For each region: lists all parameters (paginated), fetches their
    decrypted values in batches of SMS_GET_PARAM_MAX, then dumps one JSON
    file per region under <downloads>/ssm_parameters/.

    Returns a summary with the save path, region count and total parameter
    count.
    """
    session = pacu_main.get_active_session()
    print = pacu_main.print
    args = parser.parse_args(args)
    if args.regions:
        regions = args.regions.split(',')
    else:
        get_regions = pacu_main.get_regions
        regions = get_regions('ssm')
    data = {}
    for region in regions:
        param_objs = []
        NextToken = ""
        print('Looking for parameters in region {}...'.format(region))
        client = pacu_main.get_boto3_client('ssm', region)
        # Catch errors with the ssm-* regions. The original bare `except:`
        # also swallowed KeyboardInterrupt/SystemExit; Exception is enough.
        try:
            param_data = client.describe_parameters()
        except Exception:
            print('ERROR: retrieving parameters from {}'.format(region))
            continue
        # Check if any params in the region and add them to param_objs
        if param_data["Parameters"]:
            data[region] = {}
            print(' Found parameters in region {}...'.format(region))
            param_objs += param_data["Parameters"]
        else:
            continue
        NextToken = param_data.get("NextToken", "")
        # Paginate the results if needed, add them to param_objs
        if NextToken:
            while True:
                param_data = client.describe_parameters(NextToken=NextToken)
                param_objs += param_data["Parameters"]
                try:
                    NextToken = param_data["NextToken"]
                except KeyError:
                    break
        # Dump all param_objs Name fields into the data for the region
        for param_obj in param_objs:
            data[region][param_obj["Name"]] = ""
        # Pull all param names for the region to then grab the values.
        # client.get_parameters() only takes a list of 10 max, so slice the
        # names into SMS_GET_PARAM_MAX-sized chunks with a stepped range.
        param_names = list(data[region].keys())
        param_names_by_max = [
            param_names[i:i + SMS_GET_PARAM_MAX]
            for i in range(0, len(param_names), SMS_GET_PARAM_MAX)
        ]
        for names in param_names_by_max:
            full_params = client.get_parameters(Names=names,
                                                WithDecryption=True)
            for param_obj in full_params["Parameters"]:
                data[region][param_obj["Name"]] = param_obj["Value"]
    # data is a JSON-serializable mapping like
    # {"us-east-1": {"param_name1": "param_value1", ...}}
    out_dir = downloads_dir() / 'ssm_parameters'
    # The original assumed this folder existed; open() fails otherwise.
    out_dir.mkdir(parents=True, exist_ok=True)
    for ssm_region in data.keys():
        with open(out_dir / '{}.txt'.format(ssm_region), 'w+') as f:
            json.dump(data[ssm_region], f, indent=2)
    info = {}
    info["save_path"] = str(downloads_dir() / 'ssm_parameters/')
    info["region_count"] = str(len(data.keys()))
    total_params = 0
    for param_region in data.keys():
        total_params += len(data[param_region].keys())
    info["total_params"] = str(total_params)
    return info
def main(args, pacu_main: 'Main'):
    """Capture CloudWatch Logs events for every log group and stream.

    Enumerates log groups/streams in each 'logs' region, pulls events in the
    [from_time, to_time] window via filter_log_events, and writes them per
    stream under <downloads>/cloud_watch_logs/<scan_time>/.

    Returns per-region counts of groups, streams and captured events plus
    the download path.
    """
    session = pacu_main.get_active_session()
    args = parser.parse_args(args)
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    summary_data = {}
    if isinstance(args.from_time, str):
        from_time = parse_time(args.from_time)
    else:
        from_time = DEFAULT_FROM_TIME
    if isinstance(args.to_time, str):
        to_time = parse_time(args.to_time)
    scan_time = int(datetime.datetime.now().timestamp())
    regions = get_regions('logs')
    # NOTE(review): log_groups accumulates across regions, so later regions
    # also re-scan earlier regions' groups — preserved as-is to avoid a
    # behavior change.
    log_groups = {}
    for region in regions:
        print('Enumerating {}...'.format(region))
        client = pacu_main.get_boto3_client('logs', region)
        groups = collect_all(client, 'describe_log_groups', 'logGroups')
        if not groups:
            print(' No Log Groups found')
            continue
        else:
            print(' {} Log Groups found'.format(len(groups)))
        group_names = [group['logGroupName'] for group in groups]
        for group in group_names:
            log_groups[group] = {}
        for log_group in log_groups:
            streams = collect_all(client, 'describe_log_streams',
                                  'logStreams',
                                  **{'logGroupName': log_group})
            log_groups[log_group] = [
                stream['logStreamName'] for stream in streams
            ]
            if not streams:
                print(' No Streams found')
                continue
            else:
                stream_count = sum([len(log_groups[key]) for key in log_groups])
                print(' {} Streams found'.format(stream_count))
        event_count = 0
        for group in log_groups:
            for stream in log_groups[group]:
                start_time = millisecond(from_time)
                end_time = millisecond(to_time) if args.to_time else None
                kwargs = {
                    'logGroupName': group,
                    'logStreamNames': [stream],
                    'startTime': start_time,
                }
                if end_time:
                    kwargs['endTime'] = end_time
                paginator = client.get_paginator('filter_log_events')
                page_iterator = paginator.paginate(**kwargs)
                for response in page_iterator:
                    event_count += len(response['events'])
                    write_stream_file(session.name, scan_time, group, stream,
                                      response['events'])
                print(' Captured Events for {}'.format(stream))
        summary_data[region] = {
            'groups': len(log_groups),
            'streams': sum([len(log_groups[key]) for key in log_groups]),
            'events': event_count,
        }
    # downloads_dir() returns a Path; Path + str raises TypeError, so the
    # original crashed here. Build the report path from a str instead.
    dl_root = str(downloads_dir()) + '/cloud_watch_logs/'
    summary_data['log_download_path'] = '{}{}'.format(dl_root, scan_time)
    return summary_data
def main(args, pacu_main: 'Main'):
    """Enumerate Inspector findings (and optionally download reports).

    For each Inspector region: optionally generates and downloads the full
    HTML assessment report for every assessment run, then lists and
    describes all findings, storing them on the session.

    Returns a summary with report/finding counts and the regions scanned.
    """
    session = pacu_main.get_active_session()
    args = parser.parse_args(args)
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    regions = get_regions('Inspector')
    complete_data = {}
    summary_data = {
        'reports': 0,
        'findings': 0,
        'regions': regions,
    }
    if args.download_reports:
        summary_data['reports_location'] = downloads_dir(
        ) / 'inspector_assessments/'
    for region in regions:
        print('Starting region {}...'.format(region))
        client = pacu_main.get_boto3_client('inspector', region)
        if args.download_reports:
            assessment_runs = []
            response = ''
            try:
                response = client.list_assessment_runs()
                assessment_runs += response['assessmentRunArns']
                while 'nextToken' in response:
                    # Fixed: the original paginated with list_findings here,
                    # whose pages have no 'assessmentRunArns' key.
                    response = client.list_assessment_runs(
                        nextToken=response['nextToken'])
                    assessment_runs += response['assessmentRunArns']
            except ClientError as error:
                if error.response['Error']['Code'] == 'AccessDeniedException':
                    print('Access Denied for list-assessment-runs')
            if not assessment_runs:
                print(' No assessment runs found for {}'.format(region))
            else:
                summary_data['reports'] += len(assessment_runs)
                for run in assessment_runs:
                    response = client.get_assessment_report(
                        assessmentRunArn=run,
                        reportFileFormat='HTML',
                        reportType='FULL')
                    if response.get('url'):
                        # (The original appended a no-op .format(session.name)
                        # to this literal — removed; output is identical.)
                        p = 'inspector_assessments/' + str(run)[-10:] + '.html'
                        print(' Report saved to: ' + p)
                        # Use a distinct name for the HTTP response so it no
                        # longer shadows the API `response` dict.
                        with urllib.request.urlopen(
                                response['url']) as report, save(p, 'a') as f:
                            f.write(str(report.read()))
                    else:
                        print('Failed to generate report for {} ({})...'.format(
                            run, response['status']))
        findings = []
        try:
            response = client.list_findings()
            findings = response['findingArns']
            while 'nextToken' in response:
                response = client.list_findings(
                    nextToken=response['nextToken'])
                findings += response['findingArns']
        except ClientError as error:
            if error.response['Error']['Code'] == 'AccessDeniedException':
                print('Access Denied for list-findings')
                continue
        try:
            if len(findings) < 1:
                print(' No findings found')
                continue
            else:
                print(' {} findings found'.format(len(findings)))
                summary_data['findings'] += len(findings)
                descriptions = client.describe_findings(
                    findingArns=findings)['findings']
                complete_data[region] = descriptions
        except ClientError as error:
            if error.response['Error']['Code'] == 'AccessDeniedException':
                print('Access Denied for describe-findings')
    session.update(pacu_main.database, Inspector=complete_data)
    return summary_data
def main(args, pacu_main: 'Main'):
    """Confirm the IAM permissions of users and/or roles.

    Builds a worklist from --all-users/--user-name/--all-roles/--role-name,
    or from the current caller identity when none are given. For each
    principal it collects inline and attached policies (including group
    policies for users), parses their statements into Allow/Deny maps, and
    either updates the active key in the database (current identity) or
    writes a JSON report under confirmed_permissions/.

    Returns a summary with the number of users/roles confirmed, or False on
    an unrecoverable identity failure.
    """
    session = pacu_main.get_active_session()
    ###### Don't modify these. They can be removed if you are not using the function.
    args = parser.parse_args(args)
    print = pacu_main.print
    input = pacu_main.input
    key_info = pacu_main.key_info
    fetch_data = pacu_main.fetch_data
    ######
    summary_data = {'users_confirmed': 0, 'roles_confirmed': 0}
    users = []
    roles = []

    # Build the worklist of users.
    if args.all_users is True:
        if fetch_data(['IAM', 'Users'], module_info['prerequisite_modules'][0],
                      '--users') is False:
            print('FAILURE')
            print(' SUB-MODULE EXECUTION FAILED')
            return
        fetched_users = session.IAM['Users']
        for user in fetched_users:
            users.append({
                'UserName': user['UserName'],
                'PermissionsConfirmed': True,
                'Permissions': {
                    'Allow': {},
                    'Deny': {}
                }
            })
    elif args.user_name is not None:
        users.append({
            'UserName': args.user_name,
            'PermissionsConfirmed': True,
            'Permissions': {
                'Allow': {},
                'Deny': {}
            }
        })
        summary_data['single_user'] = args.user_name

    # Build the worklist of roles.
    if args.all_roles is True:
        if fetch_data(['IAM', 'Roles'], module_info['prerequisite_modules'][0],
                      '--roles') is False:
            print('FAILURE')
            print(' SUB-MODULE EXECUTION FAILED')
            return
        fetched_roles = session.IAM['Roles']
        for role in fetched_roles:
            roles.append({
                'RoleName': role['RoleName'],
                'PermissionsConfirmed': True,
                'Permissions': {
                    'Allow': {},
                    'Deny': {}
                }
            })
    elif args.role_name is not None:
        roles.append({
            'RoleName': args.role_name,
            'PermissionsConfirmed': True,
            'Permissions': {
                'Allow': {},
                'Deny': {}
            }
        })
        summary_data['single_role'] = args.role_name

    # No explicit target: fall back to the current caller identity.
    is_user = is_role = False
    if not any(
            [args.all_users, args.user_name, args.all_roles, args.role_name]):
        client = pacu_main.get_boto3_client('sts')
        identity = client.get_caller_identity()
        active_aws_key = session.get_active_aws_key(pacu_main.database)
        if re.match(r'arn:aws:iam::\d{12}:user/', identity['Arn']) is not None:
            is_user = True
            client = pacu_main.get_boto3_client('iam')
            try:
                user = client.get_user()
                active_aws_key.update(pacu_main.database,
                                      user_name=user['User']['UserName'],
                                      arn=identity['Arn'],
                                      user_id=identity['UserId'],
                                      account_id=identity['Account'])
            except botocore.exceptions.ClientError:
                username = input(
                    'Failed to discover the current users username, enter it now or Ctrl+C to exit the module: '
                ).strip()
                if username:
                    active_aws_key.update(pacu_main.database,
                                          user_name=username,
                                          arn=identity['Arn'],
                                          user_id=identity['UserId'],
                                          account_id=identity['Account'])
                else:
                    # Update the information from get_caller_identity and exit
                    active_aws_key.update(pacu_main.database,
                                          arn=identity['Arn'],
                                          user_id=identity['UserId'],
                                          account_id=identity['Account'])
                    return False
        elif re.match(r'arn:aws:sts::\d{12}:assumed-role/',
                      identity['Arn']) is not None:
            is_role = True
            active_aws_key.update(pacu_main.database,
                                  role_name=identity['Arn'].split(
                                      ':assumed-role/')[1].split('/')[-2],
                                  arn=identity['Arn'],
                                  user_id=identity['UserId'],
                                  account_id=identity['Account'])
        else:
            print('Not an IAM user or role. Exiting...\n')
            return False
        if is_user:
            user = key_info(alias=session.key_alias)
            user['PermissionsConfirmed'] = True
            user['Permissions'] = {'Allow': {}, 'Deny': {}}
            users.append(user)
            summary_data['single_user'] = user['UserName']
        elif is_role:
            roles.append({
                'RoleName': active_aws_key.role_name,
                'PermissionsConfirmed': True,
                'Permissions': {
                    'Allow': {},
                    'Deny': {}
                }
            })
            summary_data['single_role'] = active_aws_key.role_name

    # IAM APIs used below:
    # list-groups-for-user / list-user-policies / list-group-policies /
    # list-role-policies / list-attached-{role,group,user}-policies /
    # get-policy / get-policy-version / get-{user,group,role}-policy
    client = pacu_main.get_boto3_client('iam')
    if any([args.all_users, args.user_name, args.all_roles, args.role_name]):
        print('Permission Document Location:')
        print(' {}/confirmed_permissions/\n'.format(downloads_dir()))
    if roles:
        print('Confirming permissions for roles:')
        for role in roles:
            print(' {}...'.format(role['RoleName']))
            role['Policies'] = []
            try:
                # Get inline role policies
                policies = []
                try:
                    response = client.list_role_policies(
                        RoleName=role['RoleName'])
                    policies = response['PolicyNames']
                    while 'IsTruncated' in response and response[
                            'IsTruncated'] is True:
                        response = client.list_role_policies(
                            RoleName=role['RoleName'],
                            Marker=response['Marker'])
                        policies += response['PolicyNames']
                    for policy in policies:
                        role['Policies'].append({'PolicyName': policy})
                except ClientError as error:
                    print(' List role policies failed')
                    if error.response['Error']['Code'] == 'AccessDenied':
                        print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                    else:
                        print(' {}'.format(error.response['Error']['Code']))
                    role['PermissionsConfirmed'] = False
                # Get document for each inline policy
                for policy in policies:
                    try:
                        document = client.get_role_policy(
                            RoleName=role['RoleName'],
                            PolicyName=policy)['PolicyDocument']
                    except ClientError as error:
                        print(' Get role policy failed')
                        if error.response['Error']['Code'] == 'AccessDenied':
                            print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                        else:
                            print(' {}'.format(
                                error.response['Error']['Code']))
                        role['PermissionsConfirmed'] = False
                    role = parse_document(document, role)
                # Get attached role policies
                attached_policies = []
                try:
                    response = client.list_attached_role_policies(
                        RoleName=role['RoleName'])
                    attached_policies = response['AttachedPolicies']
                    while 'IsTruncated' in response and response[
                            'IsTruncated'] is True:
                        response = client.list_attached_role_policies(
                            RoleName=role['RoleName'],
                            Marker=response['Marker'])
                        attached_policies += response['AttachedPolicies']
                    role['Policies'] += attached_policies
                except ClientError as error:
                    print(' List attached role policies failed')
                    if error.response['Error']['Code'] == 'AccessDenied':
                        print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                    else:
                        print(' {}'.format(error.response['Error']['Code']))
                    role['PermissionsConfirmed'] = False
                role = parse_attached_policies(client, attached_policies, role)
                if role['PermissionsConfirmed']:
                    summary_data['roles_confirmed'] += 1
                if args.role_name is None and args.all_roles is False:
                    print(' Confirmed permissions for {}'.format(
                        role['RoleName']))
                    active_aws_key.update(
                        pacu_main.database,
                        role_name=role['RoleName'],
                        policies=role['Policies'],
                        permissions_confirmed=role['PermissionsConfirmed'],
                        allow_permissions=role['Permissions']['Allow'],
                        deny_permissions=role['Permissions']['Deny'])
                else:
                    with save(
                            'confirmed_permissions/role-{}.json'.format(
                                role['RoleName']), 'w+') as f:
                        json.dump(role, f, indent=2, default=str)
                    print(' Permissions stored in role-{}.json'.format(
                        role['RoleName']))
            except ClientError as error:
                if error.response['Error']['Code'] == 'AccessDenied':
                    print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                else:
                    print(' {}'.format(error.response['Error']['Code']))
                print('Skipping {}'.format(role['RoleName']))
        if users:
            print()
    if users:
        print('Confirming permissions for users:')
        for user in users:
            print(' {}...'.format(user['UserName']))
            user['Groups'] = []
            user['Policies'] = []
            try:
                policies = []
                # Get groups that the user is in
                try:
                    response = client.list_groups_for_user(
                        UserName=user['UserName'])
                    user['Groups'] = response['Groups']
                    while 'IsTruncated' in response and response[
                            'IsTruncated'] is True:
                        response = client.list_groups_for_user(
                            UserName=user['UserName'],
                            Marker=response['Marker'])
                        user['Groups'] += response['Groups']
                except ClientError as error:
                    print(' List groups for user failed')
                    if error.response['Error']['Code'] == 'AccessDenied':
                        print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                    else:
                        print(' {}'.format(error.response['Error']['Code']))
                    user['PermissionsConfirmed'] = False
                # Get inline and attached group policies
                for group in user['Groups']:
                    group['Policies'] = []
                    # Get inline group policies
                    try:
                        response = client.list_group_policies(
                            GroupName=group['GroupName'])
                        policies = response['PolicyNames']
                        while 'IsTruncated' in response and response[
                                'IsTruncated'] is True:
                            response = client.list_group_policies(
                                GroupName=group['GroupName'],
                                Marker=response['Marker'])
                            policies += response['PolicyNames']
                    except ClientError as error:
                        print(' List group policies failed')
                        if error.response['Error']['Code'] == 'AccessDenied':
                            print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                        else:
                            print(' {}'.format(
                                error.response['Error']['Code']))
                        user['PermissionsConfirmed'] = False
                    # Get document for each inline policy
                    for policy in policies:
                        # Add policies to list of policies for this group
                        group['Policies'].append({'PolicyName': policy})
                        try:
                            document = client.get_group_policy(
                                GroupName=group['GroupName'],
                                PolicyName=policy)['PolicyDocument']
                        except ClientError as error:
                            print(' Get group policy failed')
                            if error.response['Error'][
                                    'Code'] == 'AccessDenied':
                                print(
                                    ' FAILURE: MISSING REQUIRED AWS PERMISSIONS'
                                )
                            else:
                                print(' {}'.format(
                                    error.response['Error']['Code']))
                            user['PermissionsConfirmed'] = False
                        user = parse_document(document, user)
                    # Get attached group policies
                    attached_policies = []
                    try:
                        response = client.list_attached_group_policies(
                            GroupName=group['GroupName'])
                        attached_policies = response['AttachedPolicies']
                        while 'IsTruncated' in response and response[
                                'IsTruncated'] is True:
                            response = client.list_attached_group_policies(
                                GroupName=group['GroupName'],
                                Marker=response['Marker'])
                            attached_policies += response['AttachedPolicies']
                        group['Policies'] += attached_policies
                    except ClientError as error:
                        print(' List attached group policies failed')
                        if error.response['Error']['Code'] == 'AccessDenied':
                            print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                        else:
                            print(' {}'.format(
                                error.response['Error']['Code']))
                        user['PermissionsConfirmed'] = False
                    user = parse_attached_policies(client, attached_policies,
                                                   user)
                # Get inline user policies
                policies = []
                if 'Policies' not in user:
                    user['Policies'] = []
                try:
                    response = client.list_user_policies(
                        UserName=user['UserName'])
                    policies = response['PolicyNames']
                    while 'IsTruncated' in response and response[
                            'IsTruncated'] is True:
                        response = client.list_user_policies(
                            UserName=user['UserName'],
                            Marker=response['Marker'])
                        policies += response['PolicyNames']
                    for policy in policies:
                        user['Policies'].append({'PolicyName': policy})
                except ClientError as error:
                    print(' List user policies failed')
                    if error.response['Error']['Code'] == 'AccessDenied':
                        print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                    else:
                        print(' {}'.format(error.response['Error']['Code']))
                    user['PermissionsConfirmed'] = False
                # Get document for each inline policy
                for policy in policies:
                    try:
                        document = client.get_user_policy(
                            UserName=user['UserName'],
                            PolicyName=policy)['PolicyDocument']
                    except ClientError as error:
                        print(' Get user policy failed')
                        if error.response['Error']['Code'] == 'AccessDenied':
                            print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                        else:
                            print(' {}'.format(
                                error.response['Error']['Code']))
                        user['PermissionsConfirmed'] = False
                    user = parse_document(document, user)
                # Get attached user policies
                attached_policies = []
                try:
                    response = client.list_attached_user_policies(
                        UserName=user['UserName'])
                    attached_policies = response['AttachedPolicies']
                    while 'IsTruncated' in response and response[
                            'IsTruncated'] is True:
                        response = client.list_attached_user_policies(
                            UserName=user['UserName'],
                            Marker=response['Marker'])
                        attached_policies += response['AttachedPolicies']
                    user['Policies'] += attached_policies
                except ClientError as error:
                    print(' List attached user policies failed')
                    if error.response['Error']['Code'] == 'AccessDenied':
                        print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                    else:
                        print(' {}'.format(error.response['Error']['Code']))
                    user['PermissionsConfirmed'] = False
                user = parse_attached_policies(client, attached_policies, user)
                if user['PermissionsConfirmed']:
                    summary_data['users_confirmed'] += 1
                if args.user_name is None and args.all_users is False:
                    print(' Confirmed Permissions for {}'.format(
                        user['UserName']))
                    active_aws_key.update(
                        pacu_main.database,
                        user_name=user['UserName'],
                        arn=user['Arn'],
                        user_id=user['UserId'],
                        groups=user['Groups'],
                        policies=user['Policies'],
                        permissions_confirmed=user['PermissionsConfirmed'],
                        allow_permissions=user['Permissions']['Allow'],
                        deny_permissions=user['Permissions']['Deny'])
                else:
                    # Fixed: the original passed (session.name, user['UserName'])
                    # to a single-placeholder format, so the report file was
                    # named after the session instead of the user —
                    # inconsistent with the role branch and the print below.
                    with save(
                            'confirmed_permissions/user-{}.json'.format(
                                user['UserName']), 'w+') as f:
                        json.dump(user, f, indent=2, default=str)
                    print(' Permissions stored in user-{}.json'.format(
                        user['UserName']))
            except ClientError as error:
                if error.response['Error']['Code'] == 'AccessDenied':
                    print(' FAILURE: MISSING REQUIRED AWS PERMISSIONS')
                else:
                    print(' {}'.format(error.response['Error']['Code']))
                print('Skipping {}'.format(user['UserName']))
    return summary_data
def main(args, pacu_main):
    """Enumerate ECR repositories and their images in every target region.

    Per-region results are collected into the returned summary dict and the
    whole structure is also dumped to a timestamped JSON file in the
    downloads directory.
    """
    session = pacu_main.get_active_session()
    args = parser.parse_args(args)
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    # Regions to scan: an explicit comma-separated list, or everything.
    if args.regions:
        regions = args.regions.split(',')
    else:
        regions = get_regions('all')
    # Result skeleton.
    summary_data = {'ecr': {'regions': {}}}
    # Output file for the raw enumeration data.
    now = time.time()
    outfile_path = str(downloads_dir() / f"ecr_enum_{now}.json")
    # Scan regions one at a time.
    for region in regions:
        repo_count = 0  # ECR repositories seen in this region
        repos = []      # regional repository descriptions
        images = []     # regional image details
        print('Checking region {} for ECR Repositories...'.format(region))
        client = pacu_main.get_boto3_client('ecr', region)
        try:
            # Page through every repository in the region.
            page = client.describe_repositories()
            repos.extend(page['repositories'])
            while 'nextToken' in page:
                page = client.describe_repositories(
                    nextToken=page['nextToken'])
                repos.extend(page['repositories'])
            if repos:
                repo_count += len(repos)
                # Page through the images of each repository by name.
                for repo_name in (info['repositoryName'] for info in repos):
                    page = client.describe_images(repositoryName=repo_name)
                    images.extend(page['imageDetails'])
                    while 'nextToken' in page:
                        page = client.describe_images(
                            repositoryName=repo_name,
                            nextToken=page['nextToken'])
                        images.extend(page['imageDetails'])
                # Report regional repo/image counts.
                print("Number of repos found for region, {}: {}".format(
                    region, len(repos)))
                if images:
                    print("Number of images found for ALL repos in region, {}: {}".format(
                        region, len(images)))
                # Record the region's results for extraction later on.
                summary_data['ecr']['regions'][region] = {
                    'num_repos_found': repo_count,
                    'repositories': repos,
                    'repo_images': images,
                }
        except Exception as err:
            print('No ECR repositories retrieved for region: {}'.format(region))
            print('Error class: {}, Error message: {}'.format(
                err.__class__, str(err)))
    # Persist everything that was gathered.
    print("Writing all ECR results to file: {}".format(outfile_path))
    with open(outfile_path, "w+") as f:
        f.write(json.dumps(summary_data, indent=4, default=str))
    return summary_data
def main(args, pacu_main):
    """Enumerate ACM certificates (all / expired), their details and chains,
    and ACM-PCA private CAs across regions, then write each dataset to a
    timestamped JSON file in the downloads directory.

    Fixes vs. original:
      * discovered private CAs are now stored in data['cas'][region] — they
        were previously appended to data['certs'][region], corrupting the
        cert list and leaving the CA output file empty;
      * the pagination loops now terminate on a persistent API error (the
        except branches previously never set `finished`, so e.g. an
        AccessDenied error retried forever);
      * cert_arn/domain are pre-initialized so the exception messages cannot
        raise NameError if the loop body fails before the first assignment.
    """
    session = pacu_main.get_active_session()
    args = parser.parse_args(args)
    print = pacu_main.print
    get_regions = pacu_main.get_regions

    # Aggregated results; counters plus per-region / per-ARN collections.
    data = {
        'num_certs': 0,
        'certs': {},
        'certs_info': {},
        'certs_chain': {},
        'num_cas': 0,
        'cas': {},
        'num_certs_expired': 0,
        'certs_expired': {}
    }

    # Get the regions to check
    if args.regions == "all":
        regions = get_regions('all')
    else:
        regions = args.regions.split(',')

    for region in regions:
        # Get the ACM / ACM-PCA clients for the region
        client = pacu_main.get_boto3_client('acm', region)
        ca_client = pacu_main.get_boto3_client('acm-pca', region)

        if args.all or args.certs_list:
            # Page through all certificates in the region.
            next_token = None
            finished = False
            while not finished:
                try:
                    print("Listing ACM certificate ARNs for region: {}".format(region))
                    if next_token:
                        response = client.list_certificates(
                            NextToken=next_token, MaxItems=50)
                    else:
                        response = client.list_certificates(MaxItems=50)
                    certs_list = response['CertificateSummaryList']
                    if certs_list:
                        print('Found {} certs for region: {}'.format(
                            len(certs_list), region))
                        data['num_certs'] += len(certs_list)
                        data['certs'].setdefault(region, []).extend(certs_list)
                    if 'NextToken' in response:
                        next_token = response['NextToken']
                    else:
                        finished = True
                except Exception as err:
                    print("Exception listing ACM Certificate ARNs for region: {}".format(region))
                    print(" Error: {}, {}".format(err.__class__, str(err)))
                    finished = True  # FIX: stop paginating on error instead of looping forever

        if args.all or args.certs_expired_list:
            # Page through EXPIRED certificates only.
            next_token = None
            finished = False
            while not finished:
                try:
                    print("Listing ACM certificate ARNs which are EXPIRED for region: {}".format(region))
                    if next_token:
                        response = client.list_certificates(
                            CertificateStatuses=['EXPIRED'],
                            NextToken=next_token, MaxItems=50)
                    else:
                        response = client.list_certificates(
                            CertificateStatuses=['EXPIRED'], MaxItems=50)
                    certs_list = response['CertificateSummaryList']
                    if certs_list:
                        print('Found {} expired cert(s) for region: {}'.format(
                            len(certs_list), region))
                        data['num_certs_expired'] += len(certs_list)
                        data['certs_expired'].setdefault(region, []).extend(certs_list)
                    if 'NextToken' in response:
                        next_token = response['NextToken']
                    else:
                        finished = True
                except Exception as err:
                    print("Exception listing Expired ACM Certificate ARNs for region: {}".format(region))
                    print(" Error: {}, {}".format(err.__class__, str(err)))
                    finished = True  # FIX: stop paginating on error instead of looping forever

        if args.all or args.certs_chain:
            # Fetch the certificate + chain for every cert found above.
            if region in data['certs'] and data['certs'][region]:
                print("Getting certs, and their chain for region: {}".format(region))
                cert_arn = domain = ''  # defined for the except message below
                try:
                    for cert_arn_domain in data['certs'][region]:
                        cert_arn = cert_arn_domain.get('CertificateArn', '')
                        domain = cert_arn_domain.get('DomainName', '')
                        print("Getting info about cert: {} for region: {}".format(cert_arn, region))
                        response = client.get_certificate(CertificateArn=cert_arn)
                        data['certs_chain'][cert_arn] = response
                except Exception as err:
                    print("Exception getting ACM Certificate ARN: {}, Domain: {} for region: {}".format(cert_arn, domain, region))
                    print(" Error: {}, {}".format(err.__class__, str(err)))

        if args.all or args.certs_info:
            # Describe every cert found above for full metadata.
            if region in data['certs'] and data['certs'][region]:
                print("Describing certs: {}".format(region))
                cert_arn = domain = ''  # defined for the except message below
                try:
                    for cert_arn_domain in data['certs'][region]:
                        cert_arn = cert_arn_domain.get('CertificateArn', '')
                        domain = cert_arn_domain.get('DomainName', '')
                        print("Getting info about cert: {} for region: {}".format(cert_arn, region))
                        response = client.describe_certificate(CertificateArn=cert_arn)
                        data['certs_info'][cert_arn] = response
                except Exception as err:
                    print("Exception getting ACM Certificate ARN: {}, Domain: {} for region: {}".format(cert_arn, domain, region))
                    print(" Error: {}, {}".format(err.__class__, str(err)))

        if args.all or args.ca_list:
            # Page through all ACM-PCA private certificate authorities.
            next_token = None
            finished = False
            while not finished:
                try:
                    print("Listing ACM Private CAs for region: {}".format(region))
                    if next_token:
                        response = ca_client.list_certificate_authorities(
                            NextToken=next_token, MaxResults=50)
                    else:
                        response = ca_client.list_certificate_authorities(
                            MaxResults=50)
                    ca_list = response['CertificateAuthorities']
                    if ca_list:
                        print('Found {} CAs for region: {}'.format(
                            len(ca_list), region))
                        data['num_cas'] += len(ca_list)
                        # FIX: CAs belong in data['cas'], not data['certs'].
                        data['cas'].setdefault(region, []).extend(ca_list)
                    if 'NextToken' in response:
                        next_token = response['NextToken']
                    else:
                        finished = True
                except Exception as err:
                    print("Exception listing ACM CAs for region: {}".format(region))
                    print(" Error: {}, {}".format(err.__class__, str(err)))
                    finished = True  # FIX: stop paginating on error instead of looping forever

    # Prepare the out file names to write output data to
    now = time.time()
    outfiles = {
        'certs': str(downloads_dir() / 'acm_enum_certs_{}.json'.format(now)),
        'certs_info': str(downloads_dir() / 'acm_enum_certs_info_{}.json'.format(now)),
        'certs_chain': str(downloads_dir() / 'acm_enum_certs_chain_{}.json'.format(now)),
        'cas': str(downloads_dir() / 'acm_enum_cas_{}.json'.format(now)),
        'certs_expired': str(downloads_dir() / 'acm_enum_certs_expired_{}.json'.format(now)),
    }

    # Write each non-empty dataset to its own output file.
    for info_type, outfile in outfiles.items():
        if data[info_type]:
            print("Writing info: {} to outfile: {}".format(info_type, outfile))
            with open(outfile, 'w+') as f:
                f.write(json.dumps(data[info_type], indent=4, default=str))

    return data
def summary(data, pacu_main: 'Main'): session = pacu_main.get_active_session() out = ' Downloaded EC2 User Data for {} instance(s) and {} launch template(s) to {}/ec2_user_data/.\n'.format( downloads_dir(), data['instance_downloads'], data['template_downloads'], session.name) return out
def main(args, pacu_main):
    """Enumerate CloudFormation stacks, exports and templates in every region,
    dump them to the downloads directory, and scan the resulting JSON for
    secrets.

    Fixes vs. original:
      * paginated describe_stacks results are now included in all_stacks —
        stack_count previously counted only the first page per region;
      * a region is recorded in found_regions if it has ANY stacks, not only
        when the LAST page happened to be non-empty;
      * the bare `except:` around get_template is narrowed to Exception so it
        no longer swallows KeyboardInterrupt/SystemExit.
    """
    session = pacu_main.get_active_session()
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    regions = get_regions('cloudformation')

    def find_secrets(string):
        # Print any regex-detected secret values found in the given string.
        detections = regex_checker(string)
        for itemkey in detections:
            Color.print(
                Color.GREEN,
                '\tDetected {}: {}'.format(itemkey, detections[itemkey]))

    def outfile(subdir, filename):
        # Open (append) an output file under downloads/<module>/<subdir>/,
        # creating the directory race-free.
        base_path = downloads_dir() / f"{module_info['name']}/{subdir}/"
        os.makedirs(base_path, exist_ok=True)
        return open(base_path / filename, 'a+')

    class DateTimeEncoder(JSONEncoder):
        # Stack data contains datetime timestamps; serialize them as ISO-8601.
        def default(self, obj):
            if isinstance(obj, (datetime.date, datetime.datetime)):
                return obj.isoformat()

    all_stacks = []
    found_regions = []
    for region in regions:
        client = pacu_main.get_boto3_client('cloudformation', region)
        print('Looking for CloudFormation Stacks in region {}...'.format(region))
        stacks_data = client.describe_stacks()
        stacks = stacks_data['Stacks']

        if stacks:
            # Exports are account/region scoped, so fetch them once per region.
            print('Getting exports for region: {}'.format(region))
            exports = client.list_exports()
            if exports:
                with outfile('exports', region) as f:
                    json.dump(exports, f, indent=1)
                find_secrets(json.dumps(exports))

        # Collect every remaining page of stacks before recording totals.
        while 'NextToken' in stacks_data:
            stacks_data = client.describe_stacks(
                NextToken=stacks_data['NextToken'])
            stacks += stacks_data['Stacks']
        all_stacks += stacks  # FIX: after pagination, so all pages count

        if stacks:  # FIX: was a check on only the last page of results
            found_regions.append(region)
            for stack in stacks:
                with outfile('stacks/{}'.format(region),
                             stack['StackId'].replace('/', '-')) as f:
                    json.dump(stack, f, indent=1, cls=DateTimeEncoder)
                print('Getting template for stack: {}'.format(stack['StackId']))
                find_secrets(json.dumps(stack, cls=DateTimeEncoder))
                try:
                    templates = client.get_template(StackName=stack['StackId'])
                    with outfile('templates/{}'.format(region),
                                 stack['StackId'].replace('/', '-')) as f:
                        json.dump(templates, f, indent=1)
                except Exception:
                    # Best effort: GetTemplate may be denied per-stack; skip.
                    continue
                find_secrets(json.dumps(templates))

    return {
        'region_count': len(found_regions),
        'stack_count': len(all_stacks),
        'output_path': downloads_dir() / f"{module_info['name']}/*",
    }
def main(args, pacu_main):
    """Generate temporary SSH/RDP access credentials for Lightsail instances,
    write them to disk, and return counts of Windows/Linux credentials made.
    """
    session = pacu_main.get_active_session()
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    fetch_data = pacu_main.fetch_data
    args = parser.parse_args(args)

    regions = args.regions.split(',') if args.regions else get_regions(
        'lightsail')

    # Build the target list: either explicit name@region@protocol specs from
    # the user, or every Lightsail instance the session already knows about.
    targets = []
    if args.instances is not None:
        # need to update this to include the regions of these IDs
        for spec in args.instances.split(','):
            parts = spec.split('@')
            name, region, protocol = parts[0], parts[1], parts[2]
            if region not in regions:
                print(' {} is not a valid region'.format(region))
                continue
            targets.append({
                'name': name,
                'protocol': protocol,
                'region': region,
            })
    else:
        print('Targeting all Lightsail instances...')
        if fetch_data(['Lightsail'], module_info['prerequisite_modules'][0],
                      '--instances') is False:
            print('Pre-req module not run successfully. Exiting...')
            return
        for inst in session.Lightsail['instances']:
            if inst['region'] not in regions:
                continue
            targets.append({
                'name': inst['name'],
                # Windows blueprints get RDP access; everything else SSH.
                'protocol': 'rdp' if 'Windows' in inst['blueprintName'] else 'ssh',
                'region': inst['region'],
            })

    # Credentials collected per region.
    temp_keys = {target['region']: [] for target in targets}

    for target in targets:
        client = pacu_main.get_boto3_client('lightsail', target['region'])
        print(' Instance {}'.format(target['name']))
        try:
            response = client.get_instance_access_details(
                instanceName=target['name'], protocol=target['protocol'])
            temp_keys[target['region']].append(response['accessDetails'])
            print(' Successfully created temporary access for {}'.format(
                target['name']))
        except ClientError as error:
            code = error.response['Error']['Code']
            if code == 'AccessDeniedException':
                # No point continuing without permission.
                print(' Unauthorized to generate temporary access.')
                return
            if code == 'OperationFailureException':
                print(
                    ' FAILED: Unable to interact with non-running instance.'
                )
                continue
            print(error)
            break

    write_keys_to_file(temp_keys, session)

    # Tally credentials by protocol across all regions.
    all_creds = [cred for creds in temp_keys.values() for cred in creds]
    windows_count = sum(1 for cred in all_creds if cred['protocol'] == 'rdp')
    ssh_count = len(all_creds) - windows_count

    written_file_path = (os.path.join(downloads_dir(), module_info['name'])
                         if windows_count or ssh_count else None)

    return {
        'windows': windows_count,
        'linux': ssh_count,
        'written_file_path': written_file_path,
    }
def main(args, pacu_main: 'Main'):
    """Enumerate RDS instance and cluster snapshots across regions, record
    unencrypted and cross-account-shared snapshots, and optionally audit each
    snapshot's restore permissions (Public / Shared / Private).

    Fixes vs. original:
      * fetch_rds_data's pagination passed a merged dict POSITIONALLY to the
        boto3 call (`caller({...})`), which raises TypeError on any result
        spanning more than one page; the kwargs are now expanded properly;
      * the restore-permission checks use .get() so a snapshot whose
        attribute list lacks a 'restore' entry no longer raises KeyError.
    """
    session = pacu_main.get_active_session()
    ###### Don't modify these. They can be removed if you are not using the function.
    args = parser.parse_args(args)
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    ######

    # Make sure the session's RDS data has a Snapshots list to fill in.
    rds_data = deepcopy(session.RDS)
    if 'Snapshots' not in rds_data.keys():
        rds_data['Snapshots'] = []
    session.update(pacu_main.database, RDS=rds_data)

    if args.regions is None:
        regions = get_regions('rds')
        if regions is None or regions == [] or regions == '' or regions == {}:
            print(
                'This module is not supported in any regions specified in the current sessions region set. Exiting...'
            )
            return
    else:
        regions = args.regions.split(',')

    now = time.time()
    all_snaps = []
    snapshots_csv_data = []                 # CSV rows for unencrypted snapshots
    shared_by_other_account_snapshots = []  # snapshots shared INTO this account
    snapshot_permissions = {
        'Public': [],
        'Shared': {},   # identifier -> account IDs it is shared with
        'Private': [],
    }

    def fetch_rds_data(client, func, key, print, **kwargs):
        """Call client.<func>(**kwargs), following 'Marker' pagination, and
        return response[key]. A dict/str result is returned as-is without
        pagination; [] is returned when the caller lacks permission."""
        caller = getattr(client, func)
        try:
            response = caller(**kwargs)
            data = response[key]
            if isinstance(data, (dict, str)):
                return data
            while 'Marker' in response:
                # FIX: expand the merged kwargs — passing the dict positionally
                # raised TypeError on every paginated call.
                response = caller(**{**kwargs, 'Marker': response['Marker']})
                data.extend(response[key])
            return data
        except ClientError as error:
            print(' FAILURE:')
            code = error.response['Error']['Code']
            if code == 'AccessDenied':
                print(' MISSING NEEDED PERMISSIONS for ' + func)
            else:
                print(code)
            print(' Skipping ' + func)
            return []

    # Create rds directory for the CSV output.
    directory = str(downloads_dir() / 'rds')
    if not os.path.exists(directory):
        os.makedirs(directory)

    for region in regions:
        print(
            'Starting region {} (this may take a while if there are thousands of RDS snapshots)...'
            .format(region))
        # Start RDS Snapshots in this region
        count = 0
        rdsClient = pacu_main.get_boto3_client('rds', region)

        # Enumerate normal snapshots
        rdsSnapshotData = fetch_rds_data(
            rdsClient, 'describe_db_snapshots', 'DBSnapshots', print,
            IncludeShared=not args.no_include_shared, MaxRecords=100)
        # Enumerate cluster snapshots
        rdsClusterSnapshotData = fetch_rds_data(
            rdsClient, 'describe_db_cluster_snapshots', 'DBClusterSnapshots',
            print, IncludeShared=not args.no_include_shared, MaxRecords=100)

        # Check basic info for normal snapshots
        for snapshot in rdsSnapshotData:
            all_snaps.append(snapshot)
            snapshot['Region'] = region
            if snapshot['Encrypted'] is False:
                snapshots_csv_data.append('{},{}\n'.format(
                    snapshot['DBSnapshotIdentifier'], region))
            if snapshot['SnapshotType'] == 'shared':
                shared_by_other_account_snapshots.append(
                    snapshot['DBSnapshotIdentifier'])

        # Check basic info for cluster snapshots
        for clusterSnapshot in rdsClusterSnapshotData:
            all_snaps.append(clusterSnapshot)
            clusterSnapshot['Region'] = region
            if clusterSnapshot['StorageEncrypted'] is False:
                snapshots_csv_data.append('{},{}\n'.format(
                    clusterSnapshot['DBClusterSnapshotIdentifier'] + " (cluster)",
                    region))
            if clusterSnapshot['SnapshotType'] == 'shared':
                shared_by_other_account_snapshots.append(
                    clusterSnapshot['DBClusterSnapshotIdentifier'] + " (cluster)")

        # Handle permissions check for both types of snapshots
        if args.snapshot_permissions and (rdsSnapshotData or rdsClusterSnapshotData):
            print(
                ' Starting enumeration for own account\'s Snapshot Permissions...'
            )
            for snapshot in rdsSnapshotData:
                if snapshot['SnapshotType'] == 'shared':
                    continue  # Ignore permission check for snapshots shared by other account
                # Automated snapshots are always private
                if snapshot['SnapshotType'] == 'automated':
                    snapshot_permissions['Private'].append(
                        snapshot['DBSnapshotIdentifier'])
                # Only manual snapshots get RestoreAttributeValues
                else:
                    attributes = fetch_rds_data(
                        rdsClient, "describe_db_snapshot_attributes",
                        'DBSnapshotAttributesResult', print,
                        DBSnapshotIdentifier=snapshot['DBSnapshotIdentifier'])
                    # Without DescribeDBSnapshotAttributes permission, skip this loop
                    if attributes == []:
                        break
                    for attr in attributes['DBSnapshotAttributes']:
                        if attr['AttributeName'] == 'restore':
                            snapshot['RestoreAttributeValues'] = attr[
                                'AttributeValues']
                            break
                    # FIX: .get() — missing 'restore' attribute used to KeyError.
                    if not snapshot.get('RestoreAttributeValues'):
                        snapshot_permissions['Private'].append(
                            snapshot['DBSnapshotIdentifier'])
                    elif snapshot['RestoreAttributeValues'][0] == 'all':
                        snapshot_permissions['Public'].append(
                            snapshot['DBSnapshotIdentifier'])
                    else:
                        snapshot_permissions['Shared'][
                            snapshot['DBSnapshotIdentifier']] = snapshot[
                                'RestoreAttributeValues']

            for clusterSnapshot in rdsClusterSnapshotData:
                if clusterSnapshot['SnapshotType'] == 'shared':
                    continue  # Ignore permission check for snapshots shared by other account
                # Automated snapshots are always private
                if clusterSnapshot['SnapshotType'] == 'automated':
                    snapshot_permissions['Private'].append(
                        clusterSnapshot['DBClusterSnapshotIdentifier'] + " (cluster)")
                # Only manual snapshots get RestoreAttributeValues
                else:
                    attributes = fetch_rds_data(
                        rdsClient, "describe_db_cluster_snapshot_attributes",
                        'DBClusterSnapshotAttributesResult', print,
                        DBClusterSnapshotIdentifier=clusterSnapshot[
                            'DBClusterSnapshotIdentifier'])
                    # Without DescribeDBClusterSnapshotAttributes permission, skip this loop
                    if attributes == []:
                        break
                    for attr in attributes['DBClusterSnapshotAttributes']:
                        if attr['AttributeName'] == 'restore':
                            clusterSnapshot['RestoreAttributeValues'] = attr[
                                'AttributeValues']
                            break
                    # FIX: .get() — missing 'restore' attribute used to KeyError.
                    if not clusterSnapshot.get('RestoreAttributeValues'):
                        snapshot_permissions['Private'].append(
                            clusterSnapshot['DBClusterSnapshotIdentifier'] + " (cluster)")
                    elif clusterSnapshot['RestoreAttributeValues'][0] == 'all':
                        snapshot_permissions['Public'].append(
                            clusterSnapshot['DBClusterSnapshotIdentifier'] + " (cluster)")
                    else:
                        snapshot_permissions['Shared'][
                            clusterSnapshot['DBClusterSnapshotIdentifier'] +
                            " (cluster)"] = clusterSnapshot[
                                'RestoreAttributeValues']

        count += len(rdsSnapshotData)
        count += len(rdsClusterSnapshotData)
        print(' {} snapshot(s) found'.format(count))

    summary_data = {'snapshot_permissions': args.snapshot_permissions}
    rds_data['Snapshots'] = all_snaps
    summary_data['snapshots'] = len(rds_data['Snapshots'])

    # Write the unencrypted-snapshot CSV if anything was found.
    unencrypted_snapshots_csv_path = '{}/unencrypted_rds_snapshots_{}.csv'.format(
        directory, now)
    if snapshots_csv_data:
        with open(unencrypted_snapshots_csv_path,
                  'w+') as unencrypted_snapshots_csv:
            unencrypted_snapshots_csv.write('Snapshot Identifier ,Region\n')
            print(' Writing data for {} unencrypted snapshots...'.format(
                len(snapshots_csv_data)))
            for line in snapshots_csv_data:
                unencrypted_snapshots_csv.write(line)
        summary_data['snapshots_csv_path'] = unencrypted_snapshots_csv_path

    if not args.no_include_shared:
        summary_data['Shared by other account'] = len(
            shared_by_other_account_snapshots)
    else:
        summary_data['Shared by other account'] = False

    # Write the permission audit report if it was requested.
    if args.snapshot_permissions:
        summary_data.update({
            'Public': len(snapshot_permissions['Public']),
            'Shared': len(snapshot_permissions['Shared']),
            'Private': len(snapshot_permissions['Private']),
        })
        path = str(downloads_dir() / f"rds_snapshot_permissions_{now}.txt")
        with open(path, 'w') as out_file:
            out_file.write('Public:\n')
            for public in snapshot_permissions['Public']:
                out_file.write(' {}\n'.format(public))
            out_file.write('Shared:\n')
            for snap in snapshot_permissions['Shared']:
                out_file.write(' {}\n'.format(snap))
                for aws_id in snapshot_permissions['Shared'][snap]:
                    out_file.write(' {}\n'.format(aws_id))
            out_file.write('Private:\n')
            for private in snapshot_permissions['Private']:
                out_file.write(' {}\n'.format(private))
            if not args.no_include_shared:
                out_file.write('Shared by other account:\n')
                for sharedWithAccount in shared_by_other_account_snapshots:
                    out_file.write(' {}\n'.format(sharedWithAccount))
        summary_data['snapshot-permissions-path'] = path

    session.update(pacu_main.database, RDS=rds_data)
    return summary_data
def main(args, pacu):
    """Main module function, called from Pacu.

    Enumerates API Gateway REST APIs (stages, resources, methods, docs) plus
    account API keys and client certs per region, writing a JSON dump per
    region to downloads/apigateway/.

    Fix vs. original: outfile_path was converted to str, so the later
    `outfile_path / filename` raised TypeError (str has no '/'); it is now
    kept as a Path, and the directory is created before writing.
    """
    print = pacu.print
    session = pacu.get_active_session()
    args = parser.parse_args(args)

    # Keep this as a Path — it is joined with '/' below to build file names.
    outfile_path = downloads_dir() / 'apigateway'

    if args.regions:
        regions = args.regions.split(',')
    else:
        regions = pacu.get_regions('apigateway')

    # Set up summary data object
    # apis[] holds each api object which contains api info and route info
    # apiKeys[] holds all api keys
    # clientCerts[] holds all client certs
    summary_data = {'apis': [], 'apiKeys': [], 'clientCerts': []}

    for region in regions:
        client = pacu.get_boto3_client('apigateway', region)
        print(f"Enumerating {region}")

        # Get account-level API data for this region.
        # NOTE(review): these assignments overwrite each region's keys/certs,
        # so the returned summary only holds the LAST region's values; each
        # region's output file (written below) is still correct — confirm
        # whether extending was intended before changing.
        summary_data['apiKeys'] = get_api_keys(client)
        summary_data['clientCerts'] = get_client_certs(client)

        # Currently this only supports REST apis
        response = client.get_rest_apis()

        # for each api in the account
        for api in response['items']:
            # Per-API summary: identity, stages, base URL, then routes/docs.
            api_summary = {
                'id': api['id'],
                'name': api['name'],
                'stages': get_api_stages(client, api['id']),
                'urlBase': f"https://{api['id']}.execute-api.{region}.amazonaws.com/",
                'urlPaths': [],
                'apiDocs': [],
            }
            print(f"Enumerating API: {api_summary['name']}")

            # For each resource get all methods and parse into method summaries.
            for resource in get_api_resources(client, api_summary['id']):
                for method in get_api_methods(client, api_summary['id'],
                                              resource):
                    api_summary['urlPaths'].append(
                        parse_method(api_summary['urlBase'], method,
                                     resource['path'], api_summary['stages']))

            # Append api results to main summary
            summary_data['apis'].append(api_summary)
            # attempt to export api_docs
            api_summary['apiDocs'] = export_api_doc(client, session,
                                                    api_summary)

        # Write this region's data to the downloads directory.
        if len(summary_data['apis']) > 0:
            outfile_path.mkdir(parents=True, exist_ok=True)  # ensure dir exists
            print("Writing all results to file: {}/".format(outfile_path))
            filename = f"apigateway_{region}_{time.time()}.json"
            with open(outfile_path / filename, "w+") as f:
                f.write(json.dumps(summary_data, indent=4, default=str))

    return summary_data
def main(args, pacu_main: 'Main'):
    """Create (or import) a Lightsail SSH key pair named --key-name in each
    target region, saving newly created keys under the downloads directory.

    Fix vs. original: the `InvalidInputException` handler was listed AFTER
    `except ClientError`; botocore service exceptions subclass ClientError,
    so that handler was unreachable. It is now checked first.
    """
    session = pacu_main.get_active_session()
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    args = parser.parse_args(args)

    created_keys = {}   # region -> {'name', 'private', 'public'} for created keys
    imported_keys = 0   # number of successful imports
    name = args.key_name
    regions = args.regions.split(',') if args.regions else get_regions(
        'lightsail')

    for region in regions:
        print('Starting region {}...'.format(region))
        client = pacu_main.get_boto3_client('lightsail', region)
        try:
            if args.import_key_file is None:
                print(' Creating new key...')
                response = client.create_key_pair(keyPairName=name)
                created_keys[region] = {
                    'name': name,
                    'private': response['privateKeyBase64'],
                    'public': response['publicKeyBase64']
                }
            else:
                print(' Importing key...')
                try:
                    with open(args.import_key_file, 'r') as key_file:
                        key = key_file.read()
                except IOError:
                    print('Error opening key file.')
                    break
                client.import_key_pair(keyPairName=name, publicKeyBase64=key)
                print(' Key successfully imported for {}'.format(region))
                imported_keys += 1
        # FIX: must come before ClientError — service exceptions subclass it.
        except client.exceptions.InvalidInputException:
            print('Invalid key format provided.')
            break
        except ClientError as error:
            code = error.response['Error']['Code']
            if code == 'AccessDeniedException':
                print(' Unauthorized to add key pair to Lightsail.')
            elif 'already in use' in str(error):
                print(' Key name "{}" already in use.'.format(name))
                continue
            break

    # Persist every key we created to downloads/<module>/<region>/.
    for region in created_keys:
        ssh_key_dir = os.path.join(downloads_dir(), module_info['name'],
                                   region)
        if not os.path.exists(ssh_key_dir):
            os.makedirs(ssh_key_dir)
        private_key_file_dir = os.path.join(ssh_key_dir,
                                            created_keys[region]['name'])
        public_key_file_dir = os.path.join(
            ssh_key_dir, created_keys[region]['name'] + '.pub')
        try:
            with open(private_key_file_dir, 'w') as private_key_file:
                private_key_file.write(created_keys[region]['private'])
            with open(public_key_file_dir, 'w') as public_key_file:
                public_key_file.write(created_keys[region]['public'])
        except IOError:
            print('Error writing key pair {} to file'.format(
                created_keys[region]['name']))
            continue

    summary_data = {'keys': len(created_keys.keys()), 'imports': imported_keys}
    return summary_data
def outfile(subdir, filename): base_path = downloads_dir() / f"{module_info['name']}/{subdir}/" if not os.path.exists(base_path): os.makedirs(base_path) return open(base_path / filename, 'a+')