def create_tickets_rds_unencrypted_instances(self):
    """ Class method to create jira tickets """
    table_name = self.config.rdsEncrypt.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.aws.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, RdsEncryptionIssue)
        for issue in issues:
            instance_id = issue.issue_id
            instance_name = issue.issue_details.name
            region = issue.issue_details.region
            tags = issue.issue_details.tags
            # issue has been already reported
            if issue.timestamps.reported is not None:
                owner = issue.jira_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} RDS unencrypted instance '{instance_name}' issue")

                    comment = (f"Closing {issue.status.value} RDS unencrypted instance '{instance_name}' issue "
                               f"in '{account_name} / {account_id}' account, '{region}' region")
                    if issue.status == IssueStatus.Whitelisted:
                        # Adding label with "whitelisted" to jira ticket.
                        jira.add_label(ticket_id=issue.jira_details.ticket, label=IssueStatus.Whitelisted.value)
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                elif issue.timestamps.updated > issue.timestamps.reported:
                    logging.error(f"TODO: update jira ticket with new data: {table_name}, {account_id}, {instance_name}")
                    slack.report_issue(
                        msg=f"RDS unencrypted instance '{instance_name}' issue is changed "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_updated(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{instance_name}'")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting RDS unencrypted instance '{instance_name}' issue")

                owner = tags.get("owner", None)
                bu = tags.get("bu", None)
                product = tags.get("product", None)

                issue_summary = (f"RDS unencrypted instance '{instance_name}' "
                                 f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                issue_description = (
                    f"The RDS instance is unencrypted.\n\n"
                    f"*Threat*: "
                    f"Based on data protection policies, data that is classified as sensitive information or "
                    f"intellectual property of the organization needs to be encrypted. Additionally, as part of the "
                    f"initiative of Encryption Everywhere, it is necessary to encrypt the data in order to ensure the "
                    f"confidentiality and integrity of the data.\n\n"
                    f"*Risk*: High\n\n"
                    f"*Account Name*: {account_name}\n"
                    f"*Account ID*: {account_id}\n"
                    f"*Region*: {region}\n"
                    f"*RDS Instance ID*: {instance_id}\n")

                issue_description += JiraOperations.build_tags_table(tags)

                issue_description += "\n"

                issue_description += (f"*Recommendation*: "
                                      f"Encrypt RDS instance.")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority="Major", labels=["rds-unencrypted-instances"],
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                issue.jira_details.owner = owner
                issue.jira_details.business_unit = bu
                issue.jira_details.product = product

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                    owner=owner, account_id=account_id, bu=bu, product=product,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def create_tickets_rds_public_snapshots(self):
    """ Class method to create jira tickets """
    table_name = self.config.rdsSnapshot.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.aws.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, RdsPublicSnapshotIssue)
        for issue in issues:
            snapshot_id = issue.issue_id
            region = issue.issue_details.region
            tags = issue.issue_details.tags
            # issue has been already reported
            if issue.timestamps.reported is not None:
                owner = issue.jira_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} RDS public snapshot '{snapshot_id}' issue")

                    comment = (f"Closing {issue.status.value} RDS public snapshot '{snapshot_id}' issue "
                               f"in '{account_name} / {account_id}' account, '{region}' region")
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                elif issue.timestamps.updated > issue.timestamps.reported:
                    logging.error(f"TODO: update jira ticket with new data: {table_name}, {account_id}, {snapshot_id}")
                    slack.report_issue(
                        msg=f"RDS public snapshot '{snapshot_id}' issue is changed "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_updated(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{snapshot_id}'")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting RDS public snapshot '{snapshot_id}' issue")

                owner = tags.get("owner", None)
                bu = tags.get("bu", None)
                product = tags.get("product", None)

                issue_summary = (f"RDS public snapshot '{snapshot_id}' "
                                 f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                issue_description = (
                    f"The RDS snapshot is marked as public.\n\n"
                    f"*Risk*: High\n\n"
                    f"*Account Name*: {account_name}\n"
                    f"*Account ID*: {account_id}\n"
                    f"*Region*: {region}\n"
                    f"*RDS Snapshot ID*: {snapshot_id}\n")

                auto_remediation_date = (self.config.now + self.config.rdsSnapshot.issue_retention_date).date()
                issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                issue_description += JiraOperations.build_tags_table(tags)

                issue_description += "\n"

                issue_description += (
                    f"*Recommendation*: "
                    f"Unless you are certain you want to share all the data in the snapshot with "
                    f"all AWS accounts and users, modify the permissions: mark the snapshot as private, "
                    f"and then specify the accounts that you want to give permissions to.")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority="Major", labels=["rds-public-snapshots"],
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                issue.jira_details.owner = owner
                issue.jira_details.business_unit = bu
                issue.jira_details.product = product

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                    owner=owner, account_id=account_id, bu=bu, product=product,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def create_tickets_s3buckets(self):
    """ Class method to create jira tickets """
    table_name = self.config.s3acl.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.s3acl.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, S3AclIssue)
        for issue in issues:
            bucket_name = issue.issue_id
            tags = issue.issue_details.tags
            # issue has been already reported
            if issue.timestamps.reported is not None:
                owner = issue.issue_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} S3 bucket '{bucket_name}' public ACL issue")

                    comment = (f"Closing {issue.status.value} S3 bucket '{bucket_name}' public ACL issue "
                               f"in '{account_name} / {account_id}' account")
                    if issue.status == IssueStatus.Whitelisted:
                        # Adding label with "whitelisted" to jira ticket.
                        jira.add_label(ticket_id=issue.jira_details.ticket, label=IssueStatus.Whitelisted.value)
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                elif issue.timestamps.updated > issue.timestamps.reported:
                    logging.debug(f"Updating S3 bucket '{bucket_name}' public ACL issue")

                    comment = "Issue details are changed, please check again.\n"
                    comment += self.build_permissions_table(issue.issue_details.public_acls)
                    comment += JiraOperations.build_tags_table(tags)
                    jira.update_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"S3 bucket '{bucket_name}' public ACL issue is changed "
                            f"in '{account_name} / {account_id}' account"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_updated(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{bucket_name}'")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting S3 bucket '{bucket_name}' public ACL issue")

                owner = tags.get("owner", None)
                bu = tags.get("bu", None)
                product = tags.get("product", None)

                if bu is None:
                    bu = self.config.get_bu_by_name(bucket_name)

                issue_summary = (f"S3 bucket '{bucket_name}' with public acl "
                                 f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                issue_description = (
                    f"Bucket ACL allows unrestricted public access.\n\n"
                    f"*Threat*: "
                    f"This creates potential security vulnerabilities by allowing anyone to add, modify, or remove items in a bucket.\n\n"
                    f"*Risk*: High\n\n"
                    f"*Account Name*: {account_name}\n"
                    f"*Account ID*: {account_id}\n"
                    f"*S3 Bucket name*: {bucket_name}\n"
                    f"*Bucket Owner*: {owner}\n"
                    f"\n")

                auto_remediation_date = (self.config.now + self.config.s3acl.issue_retention_date).date()
                issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                issue_description += JiraOperations.build_tags_table(tags)

                issue_description += self.build_permissions_table(issue.issue_details.public_acls)

                issue_description += f"\n"

                issue_description += (
                    f"*Recommendation*: "
                    f"Update bucket permissions to restrict access to the owner or specific users "
                    f"or grant CloudFront OAI applicable permissions on each object that CloudFront distribution serves. ")

                if self.config.whitelisting_procedure_url:
                    issue_description += (f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                                          f"and provide a strong business reasoning. ")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority="Major", labels=["publics3"],
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                issue.jira_details.owner = owner
                issue.jira_details.business_unit = bu
                issue.jira_details.product = product

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                    owner=owner, account_id=account_id, bu=bu, product=product,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def clean_iam_access_keys(self, batch=False):
    """ Class method to remediate IAM User access keys which are not used """
    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(self.config.iamUserInactiveKeys.ddb_table_name)

    retention_period = self.config.iamUserInactiveKeys.remediation_retention_period

    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.aws.accounts.items():
        logging.debug("* Account Name:" + account_name + " :::Account ID:::" + account_id)
        issues = IssueOperations.get_account_open_issues(ddb_table, account_id, IAMKeyInactiveIssue)
        for issue in issues:
            key_id = issue.issue_id
            username = issue.issue_details.username

            user_in_whitelist = self.config.iamUserInactiveKeys.in_whitelist(account_id, username)
            key_in_whitelist = self.config.iamUserInactiveKeys.in_whitelist(account_id, key_id)

            if user_in_whitelist or key_in_whitelist:
                logging.debug(f"Skipping '{key_id} / {username}' (in whitelist)")
                continue

            if issue.timestamps.reported is None:
                logging.debug(f"Skipping '{key_id} / {username}' (was not reported)")
                continue

            if issue.timestamps.remediated is not None:
                logging.debug(f"Skipping '{key_id} / {username}' (has been already remediated)")
                continue

            updated_date = issue.timestamp_as_datetime
            no_of_days_issue_created = (self.config.now - updated_date).days

            if no_of_days_issue_created >= retention_period:
                try:
                    if not batch and \
                       not confirm(f"Do you want to remediate inactive access key '{key_id} / {username}'", False):
                        continue

                    account = Account(id=account_id, name=account_name,
                                      role_name=self.config.aws.role_name_reporting)
                    if account.session is None:
                        continue

                    logging.debug(f"Remediating inactive access key '{key_id} / {username}'")

                    remediation_succeed = True
                    try:
                        IAMOperations.disable_access_key(account.client("iam"), username, key_id)
                        comment = (f"Inactive access key '{key_id} / {username}' issue "
                                   f"in '{account_name} / {account_id}' account "
                                   f"was remediated by hammer")
                    except Exception:
                        remediation_succeed = False
                        logging.exception(f"Failed to disable '{key_id} / {username}' inactive access key")
                        comment = (f"Failed to remediate inactive access key '{key_id} / {username}' issue "
                                   f"in '{account_name} / {account_id}' account "
                                   f"due to some limitations. Please, check manually")

                    jira.remediate_issue(
                        ticket_id=issue.jira_details.ticket,
                        comment=comment,
                        reassign=remediation_succeed,
                    )
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        account_id=account_id,
                    )
                    IssueOperations.set_status_remediated(ddb_table, issue)
                except Exception:
                    logging.exception(f"Error occurred while disabling '{username} / {key_id}' "
                                      f"in '{account_name} / {account_id}'")
            else:
                logging.debug(f"Skipping '{key_id} / {username}' "
                              f"({retention_period - no_of_days_issue_created} days before remediation)")
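# All of the clean_* methods in this module gate auto-remediation on the same age check:
# an issue is only acted on once (config.now - last issue update) has reached the configured
# retention period. A minimal standalone sketch of that gate, assuming plain datetime values;
# the helper name is illustrative and not part of this codebase:
from datetime import datetime


def is_due_for_remediation(now, updated_date, retention_period_days):
    """ Return True when an issue has been open long enough to auto-remediate. """
    return (now - updated_date).days >= retention_period_days


# Example: updated 8 days ago with a 7-day retention period -> due for remediation.
assert is_due_for_remediation(datetime(2019, 1, 9), datetime(2019, 1, 1), 7)
assert not is_due_for_remediation(datetime(2019, 1, 9), datetime(2019, 1, 3), 7)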
def create_tickets_public_ami(self):
    """ Class method to create jira tickets """
    table_name = self.config.publicAMIs.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.publicAMIs.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, PublicAMIIssue)
        for issue in issues:
            ami_id = issue.issue_id
            ami_region = issue.issue_details.region
            tags = issue.issue_details.tags
            # issue has been already reported
            if issue.timestamps.reported is not None:
                owner = issue.issue_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} AMI '{ami_id}' public access issue")

                    comment = (f"Closing {issue.status.value} AMI '{ami_id}' public access issue "
                               f"in '{account_name} / {account_id}' account, {ami_region} region")
                    if issue.status == IssueStatus.Whitelisted:
                        # Adding label with "whitelisted" to jira ticket.
                        jira.add_label(ticket_id=issue.jira_details.ticket, label=IssueStatus.Whitelisted.value)
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                elif issue.timestamps.updated > issue.timestamps.reported:
                    logging.debug(f"Updating AMI '{ami_id}' public access issue")

                    comment = "Issue details are changed, please check again.\n"
                    comment += JiraOperations.build_tags_table(tags)
                    jira.update_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"AMI '{ami_id}' public access issue is changed "
                            f"in '{account_name} / {account_id}' account, {ami_region} region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_updated(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{ami_id}'")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting AMI '{ami_id}' public access issue")

                owner = tags.get("owner", None)
                bu = tags.get("bu", None)
                product = tags.get("product", None)

                if bu is None:
                    bu = self.config.get_bu_by_name(ami_id)

                issue_summary = (f"AMI '{ami_id}' with public access "
                                 f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                issue_description = (
                    f"AMI allows public access.\n\n"
                    f"*Threat*: "
                    f" .\n\n"
                    f"*Risk*: High\n\n"
                    f"*Account Name*: {account_name}\n"
                    f"*Account ID*: {account_id}\n"
                    f"*Region*: {ami_region}\n"
                    f"*AMI Id*: {ami_id}\n"
                    f"\n")

                auto_remediation_date = (self.config.now + self.config.publicAMIs.issue_retention_date).date()
                issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                issue_description += JiraOperations.build_tags_table(tags)

                issue_description += f"\n"

                issue_description += (
                    f"*Recommendation*: "
                    f"Check if public access is truly needed and "
                    f"if not - update AMI permissions to restrict access to specific accounts.")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority="Major", labels=["public-ami"],
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                issue.jira_details.owner = owner
                issue.jira_details.business_unit = bu
                issue.jira_details.product = product

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                    owner=owner, account_id=account_id, bu=bu, product=product,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def clean_security_groups(self, batch=False):
    """ Class function to clean security groups which are violating aws best practices """
    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(self.config.sg.ddb_table_name)
    backup_bucket = self.config.aws.s3_backup_bucket

    retention_period = self.config.sg.remediation_retention_period

    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.sg.remediation_accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_open_issues(ddb_table, account_id, SecurityGroupIssue)
        for issue in issues:
            group_name = issue.issue_details.name
            group_vpc_id = issue.issue_details.vpc_id
            group_id = issue.issue_id
            group_region = issue.issue_details.region
            # status = issue.jira_details.status

            name_in_whitelist = self.config.sg.in_whitelist(account_id, f"{group_vpc_id}:{group_name}")
            id_in_whitelist = self.config.sg.in_whitelist(account_id, group_id)

            if name_in_whitelist or id_in_whitelist:
                logging.debug(f"Skipping '{group_name} / {group_id}' (in whitelist)")
                # Adding label with "whitelisted" to jira ticket.
                jira.add_label(ticket_id=issue.jira_details.ticket, label=IssueStatus.Whitelisted.value)
                continue

            if issue.timestamps.reported is None:
                logging.debug(f"Skipping '{group_name} / {group_id}' (was not reported)")
                continue

            if issue.timestamps.remediated is not None:
                logging.debug(f"Skipping '{group_name} / {group_id}' (has been already remediated)")
                continue

            updated_date = issue.timestamp_as_datetime
            no_of_days_issue_created = (self.config.now - updated_date).days

            if no_of_days_issue_created >= retention_period:
                owner = issue.jira_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                try:
                    account = Account(id=account_id, name=account_name, region=group_region,
                                      role_name=self.config.aws.role_name_reporting)
                    if account.session is None:
                        continue

                    checker = SecurityGroupsChecker(account=account,
                                                    restricted_ports=self.config.sg.restricted_ports)
                    checker.check(ids=[group_id])
                    sg = checker.get_security_group(group_id)
                    if sg is None:
                        logging.debug(f"Security group '{group_name} / {group_id}' was removed by user")
                    elif sg.restricted:
                        logging.debug(f"Security group '{group_name} / {group_id}' issue was remediated by user")
                    elif sg.status != RestrictionStatus.OpenCompletely:
                        logging.debug(f"Security group '{group_name} / {group_id}' is not completely open")
                    else:
                        if not batch and \
                           not confirm(f"Do you want to remediate security group '{group_name} / {group_id}'", False):
                            continue

                        logging.debug(f"Remediating '{group_name} / {group_id}' rules")

                        backup_path = sg.backup_s3(main_account.client("s3"), backup_bucket)

                        remediation_succeed = True
                        processed = sg.restrict(RestrictionStatus.OpenCompletely)
                        if processed == 0:
                            logging.debug(f"No rules were detected to remediate in '{group_name} / {group_id}'")
                            comment = None
                        elif processed is None:
                            remediation_succeed = False
                            comment = (f"Failed to remediate security group '{group_name} / {group_id}' issue "
                                       f"in '{account_name} / {account_id}' account, '{group_region}' region "
                                       f"due to some limitations. Please, check manually")
                        else:
                            comment = (f"Rules backup was saved to "
                                       f"[{backup_path}|https://s3.console.aws.amazon.com/s3/object/{backup_bucket}/{backup_path}]. "
                                       f"Security group '{group_name} / {group_id}' `{RestrictionStatus.OpenCompletely.value}` issue "
                                       f"in '{account_name} / {account_id}' account, '{group_region}' region "
                                       f"was remediated by hammer")

                        if comment is not None:
                            jira.remediate_issue(
                                ticket_id=issue.jira_details.ticket,
                                comment=comment,
                                reassign=remediation_succeed,
                            )
                            slack.report_issue(
                                msg=f"{comment}"
                                    f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                                owner=owner, account_id=account_id, bu=bu, product=product,
                            )
                            IssueOperations.set_status_remediated(ddb_table, issue)
                except Exception:
                    logging.exception(f"Error occurred while updating security group '{group_name} / {group_id}' rules "
                                      f"in '{account_name} / {account_id} / {group_region}'")
            else:
                logging.debug(f"Skipping '{group_name} / {group_id}' "
                              f"({retention_period - no_of_days_issue_created} days before remediation)")
def cleans3bucketunencrypted(self, batch=False):
    """ Class method to clean S3 buckets which are violating aws best practices """
    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(self.config.s3Encrypt.ddb_table_name)

    retention_period = self.config.s3Encrypt.remediation_retention_period

    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.aws.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_open_issues(ddb_table, account_id, S3EncryptionIssue)
        for issue in issues:
            bucket_name = issue.issue_id

            in_whitelist = self.config.s3Encrypt.in_whitelist(account_id, bucket_name)
            in_fixlist = True

            if in_whitelist:
                logging.debug(f"Skipping {bucket_name} (in whitelist)")
                continue
            if not in_fixlist:
                logging.debug(f"Skipping {bucket_name} (not in fixlist)")
                continue

            if issue.timestamps.reported is None:
                logging.debug(f"Skipping '{bucket_name}' (was not reported)")
                continue

            if issue.timestamps.remediated is not None:
                logging.debug(f"Skipping {bucket_name} (has been already remediated)")
                continue

            updated_date = issue.timestamp_as_datetime
            no_of_days_issue_created = (self.config.now - updated_date).days

            if no_of_days_issue_created >= retention_period:
                owner = issue.jira_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                try:
                    if not batch and \
                       not confirm(f"Do you want to remediate '{bucket_name}' S3 bucket unencrypted", False):
                        continue

                    account = Account(id=account_id, name=account_name,
                                      role_name=self.config.aws.role_name_reporting)
                    if account.session is None:
                        continue

                    checker = S3EncryptionChecker(account=account)
                    checker.check(buckets=[bucket_name])
                    s3bucket = checker.get_bucket(bucket_name)

                    if s3bucket is None:
                        logging.debug(f"Bucket {bucket_name} was removed by user")
                    elif s3bucket.encrypted:
                        logging.debug(f"Bucket {s3bucket.name} unencrypted issue was remediated by user")
                    else:
                        logging.debug(f"Remediating '{s3bucket.name}' unencrypted")
                        # kms_key_id = None
                        remediation_succeed = True
                        if s3bucket.encrypt_bucket():
                            comment = (f"Bucket '{s3bucket.name}' unencrypted issue "
                                       f"in '{account_name} / {account_id}' account "
                                       f"was remediated by hammer")
                        else:
                            remediation_succeed = False
                            comment = (f"Failed to remediate bucket '{s3bucket.name}' unencrypted issue "
                                       f"in '{account_name} / {account_id}' account "
                                       f"due to some limitations. Please, check manually")

                        jira.remediate_issue(
                            ticket_id=issue.jira_details.ticket,
                            comment=comment,
                            reassign=remediation_succeed,
                        )
                        slack.report_issue(
                            msg=f"{comment}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner, account_id=account_id, bu=bu, product=product,
                        )
                        IssueOperations.set_status_remediated(ddb_table, issue)
                except Exception:
                    logging.exception(f"Error occurred while updating bucket '{bucket_name}' unencrypted "
                                      f"in '{account_name} / {account_id}'")
            else:
                logging.debug(f"Skipping '{bucket_name}' "
                              f"({retention_period - no_of_days_issue_created} days before remediation)")
def create_tickets_securitygroups(self):
    """ Class function to create jira tickets """
    table_name = self.config.sg.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.sg.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, SecurityGroupIssue)
        for issue in issues:
            group_id = issue.issue_id
            group_name = issue.issue_details.name
            group_region = issue.issue_details.region
            group_vpc_id = issue.issue_details.vpc_id
            tags = issue.issue_details.tags
            # issue has been already reported
            if issue.timestamps.reported is not None:
                owner = issue.jira_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} security group '{group_name} / {group_id}' issue")

                    comment = (f"Closing {issue.status.value} security group '{group_name} / {group_id}' issue "
                               f"in '{account_name} / {account_id}' account, '{group_region}' region")
                    if issue.status == IssueStatus.Whitelisted:
                        # Adding label with "whitelisted" to jira ticket.
                        jira.add_label(ticket_id=issue.jira_details.ticket, label=IssueStatus.Whitelisted.value)
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                elif issue.timestamps.updated > issue.timestamps.reported:
                    logging.debug(f"Updating security group '{group_name} / {group_id}' issue")

                    comment = "Issue details are changed, please check again.\n"
                    comment += self.build_open_ports_table_jira(issue.issue_details.perms)
                    comment += JiraOperations.build_tags_table(tags)
                    jira.update_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"Security group '{group_name} / {group_id}' issue is changed "
                            f"in '{account_name} / {account_id}' account, '{group_region}' region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}"
                            f"\n"
                            f"{self.build_open_ports_table_slack(issue.issue_details.perms)}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_updated(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{group_name} / {group_id}'")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting security group '{group_name} / {group_id}' issue")

                status = RestrictionStatus(issue.issue_details.status)
                # if owner/bu/product tags exist on security group - use it
                group_owner = tags.get("owner", None)
                group_bu = tags.get("bu", None)
                group_product = tags.get("product", None)

                open_port_details = self.build_open_ports_table_jira(issue.issue_details.perms)

                account_details = (f"*Risk*: High\n\n"
                                   f"*Account Name*: {account_name}\n"
                                   f"*Account ID*: {account_id}\n"
                                   f"*SG Name*: {group_name}\n"
                                   f"*SG ID*: {group_id}\n"
                                   f"*Region*: {group_region}\n")
                account_details += f"*VPC*: {group_vpc_id}\n\n" if group_vpc_id else "\n"

                account = Account(id=account_id, name=account_name, region=group_region,
                                  role_name=self.config.aws.role_name_reporting)
                ec2_client = account.client("ec2") if account.session is not None else None

                sg_instance_details = ec2_owner = ec2_bu = ec2_product = None
                sg_in_use = sg_in_use_ec2 = sg_in_use_elb = sg_in_use_rds = None
                sg_public = sg_blind_public = False
                rds_client = account.client("rds") if account.session is not None else None
                elb_client = account.client("elb") if account.session is not None else None
                elbv2_client = account.client("elbv2") if account.session is not None else None
                iam_client = account.client("iam") if account.session is not None else None
                rds_instance_details = elb_instance_details = instance_profile_details = None

                if ec2_client is not None:
                    ec2_instances = EC2Operations.get_instance_details_of_sg_associated(ec2_client, group_id)
                    sg_instance_details, instance_profile_details, \
                        sg_in_use_ec2, sg_public, sg_blind_public, \
                        ec2_owner, ec2_bu, ec2_product = self.build_instances_table(iam_client, ec2_instances)

                if elb_client is not None and elbv2_client is not None:
                    try:
                        elb_instances = EC2Operations.get_elb_details_of_sg_associated(elb_client, elbv2_client, group_id)
                        elb_instance_details, sg_in_use_elb = self.build_elb_instances_table(elb_instances)
                    except Exception:
                        logging.exception(f"Failed to build ELB details for '{group_name} / {group_id}' in {account}")

                if rds_client is not None:
                    try:
                        rds_instances = RDSOperations.get_rds_instance_details_of_sg_associated(rds_client, group_id)
                        rds_instance_details, sg_in_use_rds = self.build_rds_instances_table(rds_instances)
                    except Exception:
                        logging.exception(f"Failed to build RDS details for '{group_name} / {group_id}' in {account}")

                sg_in_use = sg_in_use_ec2 or sg_in_use_elb or sg_in_use_rds

                owner = group_owner if group_owner is not None else ec2_owner
                bu = group_bu if group_bu is not None else ec2_bu
                product = group_product if group_product is not None else ec2_product

                if bu is None:
                    bu = self.config.get_bu_by_name(group_name)

                source_description = f"has {status.value} status"
                if status == RestrictionStatus.OpenCompletely:
                    source_description = "allows access from any IP address (0.0.0.0/0, ::/0)"
                elif status == RestrictionStatus.OpenPartly:
                    source_description = "allows access from some definite public ip addresses or networks"

                if sg_public:
                    priority = "Critical"
                    summary_status = "Internet"
                    issue_description = (f"Security group has EC2 instances in public subnets "
                                         f"with public IP address attached and "
                                         f"{source_description} "
                                         f"for following ports:\n")
                    threat = (f"*Threat*: "
                              f"Instances associated with this security group are accessible via public route over Internet and "
                              f"have ingress rules which allow access to critical services which should be accessible "
                              f"only from VPN or Direct Connect. Accessing these instances via Internet can lead to leakage "
                              f"to third parties of login credentials for such services as databases/remote access. "
                              f"Open and unrestricted access from Internet increases opportunities for "
                              f"malicious activity from public internet which can potentially result in "
                              f"hacking, denial-of-service attacks, loss of data, etc. This also provides "
                              f"an ingress point to the attackers to gain backdoor access within the other "
                              f"critical services.\n")
                elif sg_blind_public:
                    priority = "Critical"
                    summary_status = "Internet"
                    issue_description = (f"Security group has EC2 instances in private subnets "
                                         f"with public IP address attached and "
                                         f"{source_description} "
                                         f"for following ports:\n")
                    threat = (f"*Threat*: "
                              f"Instances listed below can be probed by external attack vectors and "
                              f"are vulnerable to blind injection based attacks: although "
                              f"the EC2 instances are in a private subnet, if the security group and NACL "
                              f"allow access from the internet, incoming traffic will reach the "
                              f"instances when someone probes their public IP. "
                              f"However, there will be no return traffic due to the lack of an IGW.\n")
                elif not sg_in_use:
                    priority = "Minor"
                    summary_status = "Unused"
                    issue_description = (f"Security group has no EC2 instances attached and "
                                         f"{source_description} "
                                         f"for following ports:\n")
                    threat = (f"*Threat*: "
                              f"An unused SG can be leveraged to gain control/access within the network "
                              f"if attached to any exposed instance. This unrestricted access increases "
                              f"opportunities for malicious activity (hacking, denial-of-service attacks, "
                              f"loss of data).\n")
                else:
                    priority = "Major"
                    summary_status = "Intranet"
                    issue_description = (f"Security group has EC2 instances in private subnets and "
                                         f"{source_description} "
                                         f"for following ports:\n")
                    threat = (f"*Threat*: "
                              f"Open access within the network not only provides unrestricted access to "
                              f"other servers but increases opportunities for malicious activity (hacking, "
                              f"denial-of-service attacks, loss of data) if attacker gains access to the "
                              f"services within the network, thus providing lateral movement.\n")

                tags_table = JiraOperations.build_tags_table(tags)

                issue_description = (f"{issue_description}"
                                     f"{open_port_details}"
                                     f"{threat}"
                                     f"{account_details}")

                if status == RestrictionStatus.OpenCompletely:
                    auto_remediation_date = (self.config.now + self.config.sg.issue_retention_date).date()
                    issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                issue_description += f"{tags_table}"

                issue_description += f"{sg_instance_details if sg_instance_details else ''}"
                issue_description += f"{rds_instance_details if rds_instance_details else ''}"
                issue_description += f"{elb_instance_details if elb_instance_details else ''}"
                issue_description += f"{instance_profile_details if instance_profile_details else ''}"

                issue_description += (f"*Recommendation*: "
                                      f"Allow access only for a minimum set of required ip addresses/ranges from [RFC1918|https://tools.ietf.org/html/rfc1918]. ")

                if self.config.whitelisting_procedure_url:
                    issue_description += (f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                                          f"and provide a strong business reasoning. ")

                issue_description += f"Be sure to delete overly permissive rules after creating rules that are more restrictive.\n"

                issue_summary = (f"{summary_status} open security group '{group_name}'"
                                 f" in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority=priority, labels=["insecure-services"],
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                issue.jira_details.public = sg_public
                issue.jira_details.blind_public = sg_blind_public
                issue.jira_details.in_use = sg_in_use

                issue.jira_details.owner = owner
                issue.jira_details.business_unit = bu
                issue.jira_details.product = product

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}"
                        f"\n"
                        f"{self.build_open_ports_table_slack(issue.issue_details.perms)}",
                    owner=owner, account_id=account_id, bu=bu, product=product,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def create_jira_ticket(self):
    """ Class method to create jira ticket """
    table_name = self.config.iamUserInactiveKeys.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.iamUserInactiveKeys.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, IAMKeyInactiveIssue)
        for issue in issues:
            key_id = issue.issue_id
            username = issue.issue_details.username
            # issue has been already reported
            if issue.timestamps.reported is not None:
                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} inactive access key '{key_id} / {username}' issue")

                    comment = (f"Closing {issue.status.value} inactive access key '{key_id} / {username}' issue "
                               f"in '{account_name} / {account_id}' account")
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        account_id=account_id,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{key_id} / {username}'")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting inactive access key '{key_id} / {username}' issue")

                issue_summary = (f"IAM access key '{key_id}' for '{username}' has not been used "
                                 f"for {self.config.iamUserInactiveKeys.inactive_criteria_days.days} days "
                                 f"in '{account_name} / {account_id}' account")

                create_date = dateutil.parser.parse(issue.issue_details.create_date).replace(tzinfo=None).isoformat(' ', 'minutes')
                last_used = dateutil.parser.parse(issue.issue_details.last_used).replace(tzinfo=None).isoformat(' ', 'minutes')

                issue_description = (
                    f"IAM access key has not been used for {self.config.iamUserInactiveKeys.inactive_criteria_days.days} days.\n\n"
                    f"*Risk*: Low\n\n"
                    f"*Account Name*: {account_name}\n"
                    f"*Account ID*: {account_id}\n"
                    f"*User Name*: {username}\n"
                    f"*Key ID*: {key_id}\n"
                    f"*Key created*: {create_date}\n"
                    f"*Key last used*: {last_used}\n"
                    f"\n")

                auto_remediation_date = (self.config.now + self.config.iamUserInactiveKeys.issue_retention_date).date()
                issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                issue_description += f"*Recommendation*: Deactivate specified inactive user access key. "

                if self.config.whitelisting_procedure_url:
                    issue_description += (f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                                          f"and provide a strong business reasoning. ")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority="Major", labels=["inactive-iam-keys"],
                        account_id=account_id,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                    account_id=account_id,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def create_tickets_cloud_trail_logging(self):
    """ Class function to create jira tickets """
    table_name = self.config.cloudtrails.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.cloudtrails.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, CloudTrailIssue)
        for issue in issues:
            region = issue.issue_id
            # issue has been already reported
            if issue.timestamps.reported is not None:
                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} '{region}' CloudTrail logging issue")

                    comment = (f"Closing {issue.status.value} issue with '{region}' CloudTrail logging in "
                               f"'{account_name} / {account_id}'")
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        account_id=account_id,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                elif issue.timestamps.updated > issue.timestamps.reported:
                    logging.debug(f"Updating '{region}' issue")

                    comment = "Issue details are changed, please check again.\n"
                    comment += self.build_trail_status(issue.issue_details.disabled,
                                                       issue.issue_details.delivery_errors)
                    comment += f"\n\n"
                    comment += self.build_trails_table(issue.issue_details.trails)
                    jira.update_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"CloudTrail logging '{region}' issue is changed in "
                            f"'{account_name} / {account_id}'"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        account_id=account_id,
                    )
                    IssueOperations.set_status_updated(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{region}' issue")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting '{region}' CloudTrail logging issue")

                if issue.issue_details.disabled:
                    issue_summary = f"Disabled CloudTrail in '{account_name} / {account_id} / {region}' "
                    issue_description = "No enabled CloudTrails for region available."
                    recommendation = "Create CloudTrail for region"
                elif issue.issue_details.delivery_errors:
                    issue_summary = f"CloudTrail logging issues in '{account_name} / {account_id} / {region}' "
                    issue_description = "CloudTrail has issues with logging."
                    recommendation = "Check policies for CloudTrail logging"
                else:
                    raise Exception("not disabled and no errors, this should not have happened")

                issue_description = (f"{issue_description}\n\n"
                                     f"*Risk*: High\n\n"
                                     f"*Account Name*: {account_name}\n"
                                     f"*Account ID*: {account_id}\n"
                                     f"*Region*: {region}\n")

                issue_description += self.build_trail_status(issue.issue_details.disabled,
                                                             issue.issue_details.delivery_errors)
                issue_description += self.build_trails_table(issue.issue_details.trails)

                issue_description += f"\n\n*Recommendation*: {recommendation}. "

                if self.config.whitelisting_procedure_url:
                    issue_description += (f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                                          f"and provide a strong business reasoning. ")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority="Major", labels=["cloud-trail-disabled"],
                        account_id=account_id,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                    account_id=account_id,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def create_tickets_ebsvolumes(self):
    """ Class method to create jira tickets """
    table_name = self.config.ebsVolume.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.ebsVolume.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, EBSUnencryptedVolumeIssue)
        for issue in issues:
            volume_id = issue.issue_id
            region = issue.issue_details.region
            tags = issue.issue_details.tags
            # issue has been already reported
            if issue.timestamps.reported is not None:
                owner = issue.jira_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} EBS unencrypted volume '{volume_id}' issue")

                    comment = (f"Closing {issue.status.value} EBS unencrypted volume '{volume_id}' issue "
                               f"in '{account_name} / {account_id}' account, '{region}' region")
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                elif issue.timestamps.updated > issue.timestamps.reported:
                    logging.error(f"TODO: update jira ticket with new data: {table_name}, {account_id}, {volume_id}")
                    slack.report_issue(
                        msg=f"EBS unencrypted volume '{volume_id}' issue is changed "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_updated(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{volume_id}'")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting EBS unencrypted volume '{volume_id}' issue")

                # if owner/bu/product tags exist on volume - use it
                volume_owner = tags.get("owner", None)
                volume_bu = tags.get("bu", None)
                volume_product = tags.get("product", None)

                issue_description = (
                    f"EBS volume needs to be encrypted.\n\n"
                    f"*Risk*: High\n\n"
                    f"*Account Name*: {account_name}\n"
                    f"*Account ID*: {account_id}\n"
                    f"*Region*: {region}\n"
                    f"*Volume ID*: {volume_id}\n")

                ec2_details = ec2_owner = ec2_bu = ec2_product = None
                if issue.issue_details.attachments:
                    account = Account(id=account_id, name=account_name, region=region,
                                      role_name=self.config.aws.role_name_reporting)
                    if account.session is not None:
                        ec2_client = account.client("ec2")
                        ec2_instances = []
                        for instance_id, state in issue.issue_details.attachments.items():
                            metadata = EC2Operations.get_instance_meta_data(ec2_client, instance_id)
                            if metadata is not None:
                                ec2_instances.append({
                                    'ec2': metadata,
                                    'state': state
                                })
                        ec2_details, ec2_owner, ec2_bu, ec2_product = self.build_instances_table(ec2_instances)

                owner = volume_owner if volume_owner is not None else ec2_owner
                bu = volume_bu if volume_bu is not None else ec2_bu
                product = volume_product if volume_product is not None else ec2_product

                issue_description += JiraOperations.build_tags_table(tags)

                issue_description += ec2_details if ec2_details else ''

                issue_description += "*Recommendation*: Encrypt EBS volume. "

                if self.config.whitelisting_procedure_url:
                    issue_description += (f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                                          f"and provide a strong business reasoning. ")

                issue_summary = (f"EBS unencrypted volume '{volume_id}' "
                                 f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority="Major", labels=["unencrypted-ebs-volumes"],
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                issue.jira_details.owner = owner
                issue.jira_details.business_unit = bu
                issue.jira_details.product = product

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                    owner=owner, account_id=account_id, bu=bu, product=product,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def create_tickets_s3buckets(self):
    """ Class method to create jira tickets """
    table_name = self.config.s3policy.ddb_table_name

    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(table_name)
    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.aws.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, S3PolicyIssue)
        for issue in issues:
            bucket_name = issue.issue_id
            tags = issue.issue_details.tags
            policy = issue.issue_details.policy
            # issue has been already reported
            if issue.timestamps.reported is not None:
                owner = issue.issue_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                    logging.debug(f"Closing {issue.status.value} S3 bucket '{bucket_name}' public policy issue")

                    comment = (f"Closing {issue.status.value} S3 bucket '{bucket_name}' public policy "
                               f"in '{account_name} / {account_id}' account ")
                    jira.close_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_closed(ddb_table, issue)
                # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                elif issue.timestamps.updated > issue.timestamps.reported:
                    logging.debug(f"Updating S3 bucket '{bucket_name}' public policy issue")

                    comment = "Issue details are changed, please check again.\n"
                    # Adding new bucket policy json as attachment to Jira ticket.
                    attachment = jira.add_attachment(ticket_id=issue.jira_details.ticket,
                                                     filename=self.attachment_name(account_id, bucket_name),
                                                     text=policy)
                    if attachment is not None:
                        comment += f"New policy - [^{attachment.filename}].\n"
                    comment += JiraOperations.build_tags_table(tags)
                    jira.update_issue(ticket_id=issue.jira_details.ticket, comment=comment)
                    slack.report_issue(
                        msg=f"S3 bucket '{bucket_name}' public policy issue is changed "
                            f"in '{account_name} / {account_id}' account"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                    IssueOperations.set_status_updated(ddb_table, issue)
                else:
                    logging.debug(f"No changes for '{bucket_name}'")
            # issue has not been reported yet
            else:
                logging.debug(f"Reporting S3 bucket '{bucket_name}' public policy issue")

                owner = tags.get("owner", None)
                bu = tags.get("bu", None)
                product = tags.get("product", None)

                if bu is None:
                    bu = self.config.get_bu_by_name(bucket_name)

                issue_summary = (f"S3 bucket '{bucket_name}' with public policy "
                                 f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                issue_description = (
                    f"Bucket policy allows unrestricted public access.\n\n"
                    f"*Threat*: "
                    f"This creates potential security vulnerabilities by allowing anyone to add, modify, or remove items in a bucket.\n\n"
                    f"*Risk*: High\n\n"
                    f"*Account Name*: {account_name}\n"
                    f"*Account ID*: {account_id}\n"
                    f"*S3 Bucket name*: {bucket_name}\n"
                    f"*Bucket Owner*: {owner}\n"
                    f"\n")

                auto_remediation_date = (self.config.now + self.config.s3policy.issue_retention_date).date()
                issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                issue_description += JiraOperations.build_tags_table(tags)

                issue_description += f"\n"

                issue_description += (
                    f"*Recommendation*: "
                    f"Check if global access is truly needed and "
                    f"if not - update bucket permissions to restrict access to specific private IP ranges from RFC1918.")

                try:
                    response = jira.add_issue(
                        issue_summary=issue_summary, issue_description=issue_description,
                        priority="Major", labels=["publics3"],
                        owner=owner, account_id=account_id, bu=bu, product=product,
                    )
                except Exception:
                    logging.exception("Failed to create jira ticket")
                    continue

                if response is not None:
                    issue.jira_details.ticket = response.ticket_id
                    issue.jira_details.ticket_assignee_id = response.ticket_assignee_id
                    # Adding bucket policy json as attachment to Jira ticket.
                    jira.add_attachment(ticket_id=issue.jira_details.ticket,
                                        filename=self.attachment_name(account_id, bucket_name),
                                        text=policy)

                issue.jira_details.owner = owner
                issue.jira_details.business_unit = bu
                issue.jira_details.product = product

                slack.report_issue(
                    msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                    owner=owner, account_id=account_id, bu=bu, product=product,
                )

                IssueOperations.set_status_reported(ddb_table, issue)
def clean_ami_public_access(self):
    """ Class method to clean AMI public access which are violating aws best practices """
    main_account = Account(region=self.config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(self.config.publicAMIs.ddb_table_name)

    retention_period = self.config.publicAMIs.remediation_retention_period

    jira = JiraReporting(self.config)
    slack = SlackNotification(self.config)

    for account_id, account_name in self.config.aws.accounts.items():
        logging.debug(f"Checking '{account_name} / {account_id}'")
        issues = IssueOperations.get_account_open_issues(ddb_table, account_id, PublicAMIIssue)
        for issue in issues:
            ami_id = issue.issue_id

            in_whitelist = self.config.publicAMIs.in_whitelist(account_id, ami_id)

            if in_whitelist:
                logging.debug(f"Skipping {ami_id} (in whitelist)")
                # Adding label with "whitelisted" to jira ticket.
                jira.add_label(ticket_id=issue.jira_details.ticket, label=IssueStatus.Whitelisted.value)
                continue

            if issue.timestamps.reported is None:
                logging.debug(f"Skipping '{ami_id}' (was not reported)")
                continue

            if issue.timestamps.remediated is not None:
                logging.debug(f"Skipping {ami_id} (has been already remediated)")
                continue

            updated_date = issue.timestamp_as_datetime
            no_of_days_issue_created = (self.config.now - updated_date).days

            if no_of_days_issue_created >= retention_period:
                owner = issue.jira_details.owner
                bu = issue.jira_details.business_unit
                product = issue.jira_details.product

                try:
                    account = Account(id=account_id, name=account_name,
                                      region=issue.issue_details.region,
                                      role_name=self.config.aws.role_name_reporting)
                    if account.session is None:
                        continue

                    checker = PublicAMIChecker(account=account)
                    checker.check(amis_to_check=[ami_id])
                    ami = checker.get_ami(ami_id)
                    if ami is None:
                        logging.debug(f"AMI {ami_id} was removed by user")
                    elif not ami.public_access:
                        logging.debug(f"AMI {ami.name} public access issue was remediated by user")
                    else:
                        logging.debug(f"Remediating '{ami.name}' ")

                        remediation_succeed = True
                        if ami.modify_image_attribute():
                            comment = (f"AMI '{ami.name}' public access issue "
                                       f"in '{account_name} / {account_id}' account "
                                       f"was remediated by hammer")
                        else:
                            remediation_succeed = False
                            comment = (f"Failed to remediate AMI '{ami.name}' public access issue "
                                       f"in '{account_name} / {account_id}' account "
                                       f"due to some limitations. Please, check manually")

                        jira.remediate_issue(
                            ticket_id=issue.jira_details.ticket,
                            comment=comment,
                            reassign=remediation_succeed,
                        )
                        slack.report_issue(
                            msg=f"{comment}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner, account_id=account_id, bu=bu, product=product,
                        )
                        IssueOperations.set_status_remediated(ddb_table, issue)
                except Exception:
                    logging.exception(f"Error occurred while updating AMI '{ami_id}' access "
                                      f"in '{account_name} / {account_id}'")
            else:
                logging.debug(f"Skipping '{ami_id}' "
                              f"({retention_period - no_of_days_issue_created} days before remediation)")
    def create_tickets_s3_unencrypted_buckets(self):
        """ Class method to create jira tickets """
        table_name = self.config.s3Encrypt.ddb_table_name
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, S3EncryptionIssue)
            for issue in issues:
                bucket_name = issue.issue_id
                tags = issue.issue_details.tags
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                        logging.debug(
                            f"Closing {issue.status.value} S3 bucket '{bucket_name}' unencrypted issue")

                        comment = (
                            f"Closing {issue.status.value} S3 bucket '{bucket_name}' unencrypted issue "
                            f"in '{account_name} / {account_id}' account")
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        slack.report_issue(
                            msg=f"{comment}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        logging.debug(
                            f"Updating S3 bucket '{bucket_name}' unencrypted issue")

                        comment = "Issue details are changed, please check again.\n"
                        comment += JiraOperations.build_tags_table(tags)

                        jira.update_issue(ticket_id=issue.jira_details.ticket,
                                          comment=comment)

                        slack.report_issue(
                            msg=f"S3 bucket '{bucket_name}' unencrypted issue is changed "
                                f"in '{account_name} / {account_id}' account"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )

                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{bucket_name}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting S3 bucket '{bucket_name}' unencrypted issue")

                    owner = tags.get("owner", None)
                    bu = tags.get("bu", None)
                    product = tags.get("product", None)

                    if bu is None:
                        bu = self.config.get_bu_by_name(bucket_name)

                    issue_summary = (
                        f"S3 bucket '{bucket_name}' unencrypted "
                        f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                    issue_description = (
                        f"Bucket is unencrypted.\n\n"
                        f"*Threat*: "
                        f"Based on data protection policies, data that is classified as sensitive information or "
                        f"intellectual property of the organization needs to be encrypted. "
                        f"Additionally, as part of the "
                        f"initiative of Encryption Everywhere, it is necessary to encrypt the data in order to ensure the "
                        f"confidentiality and integrity of the data.\n\n"
                        f"*Risk*: High\n\n"
                        f"*Account Name*: {account_name}\n"
                        f"*Account ID*: {account_id}\n"
                        f"*S3 Bucket name*: {bucket_name}\n"
                        f"*Bucket Owner*: {owner}\n"
                        f"\n")

                    auto_remediation_date = (
                        self.config.now +
                        self.config.s3Encrypt.issue_retention_date).date()
                    issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                    issue_description += JiraOperations.build_tags_table(tags)

                    issue_description += f"\n"

                    issue_description += (
                        f"*Recommendation*: "
                        f"Encrypt the bucket by enabling server-side encryption with either "
                        f"Amazon S3-managed keys (SSE-S3) or AWS KMS-managed keys (SSE-KMS).")

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["s3-unencrypted"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                    except Exception:
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                        issue.jira_details.owner = owner
                        issue.jira_details.business_unit = bu
                        issue.jira_details.product = product

                        slack.report_issue(
                            msg=f"Discovered {issue_summary}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )

                        IssueOperations.set_status_reported(ddb_table, issue)
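    # The ticket recommendation above (enable SSE-S3 or SSE-KMS) maps to a single S3 API call.
    # A minimal hypothetical sketch with plain boto3, assuming SSE-S3 (AES256) is an acceptable
    # default; illustrative only and not part of the original remediation code:
    def _encrypt_bucket_sketch(self, account, bucket_name):
        """ Hypothetical helper: turn on default server-side encryption for a bucket. """
        s3 = account.client("s3")
        s3.put_bucket_encryption(
            Bucket=bucket_name,
            ServerSideEncryptionConfiguration={
                "Rules": [{"ApplyServerSideEncryptionByDefault": {"SSEAlgorithm": "AES256"}}]
            },
        )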
    def clean_sqs_policy_permissions(self):
        """ Class method to clean SQS queues which are violating aws best practices """
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            self.config.sqspolicy.ddb_table_name)
        backup_bucket = self.config.aws.s3_backup_bucket

        retention_period = self.config.sqspolicy.remediation_retention_period

        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_open_issues(
                ddb_table, account_id, SQSPolicyIssue)
            for issue in issues:
                queue_url = issue.issue_id
                queue_name = issue.issue_details.name
                queue_region = issue.issue_details.region

                in_whitelist = self.config.sqspolicy.in_whitelist(account_id, queue_url)

                if in_whitelist:
                    logging.debug(f"Skipping {queue_name} (in whitelist)")
                    continue

                if issue.timestamps.reported is None:
                    logging.debug(f"Skipping '{queue_name}' (was not reported)")
                    continue

                if issue.timestamps.remediated is not None:
                    logging.debug(f"Skipping {queue_name} (has been already remediated)")
                    continue

                updated_date = issue.timestamp_as_datetime
                no_of_days_issue_created = (self.config.now - updated_date).days

                if no_of_days_issue_created >= retention_period:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    try:
                        account = Account(
                            id=account_id,
                            name=account_name,
                            region=issue.issue_details.region,
                            role_name=self.config.aws.role_name_reporting)
                        if account.session is None:
                            continue

                        checker = SQSPolicyChecker(account=account)
                        checker.check(queues=[queue_url])

                        queue = checker.get_queue(queue_name)
                        if queue is None:
                            logging.debug(f"Queue {queue_name} was removed by user")
                        elif not queue.public:
                            logging.debug(
                                f"Queue {queue.name} policy issue was remediated by user")
                        else:
                            logging.debug(f"Remediating '{queue.name}' policy")

                            backup_path = queue.backup_policy_s3(
                                main_account.client("s3"), backup_bucket)

                            remediation_succeed = True
                            if queue.restrict_policy():
                                comment = (
                                    f"Policy backup was saved to "
                                    f"[{backup_path}|https://s3.console.aws.amazon.com/s3/object/{backup_bucket}/{backup_path}]. "
                                    f"Queue '{queue.name}' policy issue "
                                    f"in '{account_name} / {account_id}' account, '{queue_region}' region "
                                    f"was remediated by hammer")
                            else:
                                remediation_succeed = False
                                comment = (
                                    f"Failed to remediate queue '{queue.name}' policy issue "
                                    f"in '{account_name} / {account_id}' account, '{queue_region}' region "
                                    f"due to some limitations. Please, check manually")

                            jira.remediate_issue(
                                ticket_id=issue.jira_details.ticket,
                                comment=comment,
                                reassign=remediation_succeed,
                            )
                            slack.report_issue(
                                msg=f"{comment}"
                                    f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                                owner=owner,
                                account_id=account_id,
                                bu=bu,
                                product=product,
                            )
                            IssueOperations.set_status_remediated(ddb_table, issue)
                    except Exception:
                        logging.exception(
                            f"Error occurred while updating queue '{queue_url}' policy "
                            f"in '{account_name} / {account_id}', '{queue_region}' region")
                else:
                    logging.debug(
                        f"Skipping '{queue_name}' "
                        f"({retention_period - no_of_days_issue_created} days before remediation)")
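    # The queue object's restrict_policy() call above is not shown in this excerpt. A
    # hypothetical sketch of one way such a remediation could work with plain boto3: drop
    # statements that grant access to the '*' principal and write the filtered policy back
    # (illustrative only; removing the policy entirely when nothing remains is left out):
    def _restrict_queue_policy_sketch(self, account, queue_url):
        """ Hypothetical helper: strip wildcard-principal statements from a queue policy. """
        import json

        sqs = account.client("sqs")
        attrs = sqs.get_queue_attributes(QueueUrl=queue_url, AttributeNames=["Policy"])
        policy = json.loads(attrs.get("Attributes", {}).get("Policy", "{}"))
        restricted = [
            st for st in policy.get("Statement", [])
            if st.get("Principal") not in ("*", {"AWS": "*"})
        ]
        if restricted:
            policy["Statement"] = restricted
            sqs.set_queue_attributes(QueueUrl=queue_url,
                                     Attributes={"Policy": json.dumps(policy)})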
    def clean_public_rds_snapshots(self, batch=False):
        """ Class method to remediate public rds snapshot """
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            self.config.rdsSnapshot.ddb_table_name)
        # backup_bucket = self.config.aws.s3_backup_bucket

        retention_period = self.config.rdsSnapshot.remediation_retention_period

        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.rdsSnapshot.remediation_accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_open_issues(
                ddb_table, account_id, RdsPublicSnapshotIssue)
            for issue in issues:
                if issue.timestamps.remediated is not None:
                    logging.debug(
                        f"Skipping '{issue.issue_id}' (has been already remediated)")
                    continue

                in_whitelist = self.config.rdsSnapshot.in_whitelist(
                    account_id, issue.issue_id)
                if in_whitelist:
                    logging.debug(f"Skipping '{issue.issue_id}' (in whitelist)")

                    # Adding label with "whitelisted" to jira ticket.
                    jira.add_label(ticket_id=issue.jira_details.ticket,
                                   label=IssueStatus.Whitelisted.value)
                    continue

                if issue.timestamps.reported is None:
                    logging.debug(f"Skipping '{issue.issue_id}' (was not reported)")
                    continue

                updated_date = issue.timestamp_as_datetime
                no_of_days_issue_created = (self.config.now - updated_date).days

                if no_of_days_issue_created >= retention_period:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    try:
                        if not batch and \
                           not confirm(f"Do you want to remediate public RDS snapshot '{issue.issue_id}'", False):
                            continue

                        account = Account(
                            id=account_id,
                            name=account_name,
                            region=issue.issue_details.region,
                            role_name=self.config.aws.role_name_reporting)
                        if account.session is None:
                            continue

                        remediation_succeed = True
                        try:
                            RdsSnapshotOperations.make_private(
                                account.client("rds"),
                                issue.issue_details.engine,
                                issue.issue_details.name)
                            comment = (
                                f"RDS public snapshot '{issue.issue_id}' issue "
                                f"in '{account_name} / {account_id}' account, '{issue.issue_details.region}' region "
                                f"was remediated by hammer")
                        except Exception:
                            remediation_succeed = False
                            logging.exception(
                                f"Failed to make private '{issue.issue_id}' RDS public snapshot")
                            comment = (
                                f"Failed to remediate RDS public snapshot '{issue.issue_id}' issue "
                                f"in '{account_name} / {account_id}' account, '{issue.issue_details.region}' region "
                                f"due to some limitations. Please, check manually")

                        jira.remediate_issue(
                            ticket_id=issue.jira_details.ticket,
                            comment=comment,
                            reassign=remediation_succeed,
                        )
                        slack.report_issue(
                            msg=f"{comment}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_remediated(ddb_table, issue)
                    except Exception:
                        logging.exception(
                            f"Error occurred while updating RDS snapshot {issue.issue_id} "
                            f"in {account_id}/{issue.issue_details.region}")
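    # RdsSnapshotOperations.make_private() above is not shown in this excerpt. A minimal
    # hypothetical sketch with plain boto3, assuming an engine value starting with 'aurora'
    # indicates a cluster snapshot; both calls drop the public 'all' entry from the snapshot's
    # restore attribute (illustrative only):
    def _make_rds_snapshot_private_sketch(self, rds_client, engine, snapshot_id):
        """ Hypothetical helper: remove public restore permission from an RDS snapshot. """
        if engine.startswith("aurora"):
            rds_client.modify_db_cluster_snapshot_attribute(
                DBClusterSnapshotIdentifier=snapshot_id,
                AttributeName="restore",
                ValuesToRemove=["all"])
        else:
            rds_client.modify_db_snapshot_attribute(
                DBSnapshotIdentifier=snapshot_id,
                AttributeName="restore",
                ValuesToRemove=["all"])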
    def clean_s3bucket_policy_permissions(self, batch=False):
        """ Class method to clean S3 buckets which are violating aws best practices """
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            self.config.s3policy.ddb_table_name)
        backup_bucket = self.config.aws.s3_backup_bucket

        retention_period = self.config.s3policy.remediation_retention_period

        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.s3policy.remediation_accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_open_issues(
                ddb_table, account_id, S3PolicyIssue)
            for issue in issues:
                bucket_name = issue.issue_id

                in_whitelist = self.config.s3policy.in_whitelist(account_id, bucket_name)
                # in_fixlist = self.config.s3policy.in_fixnow(account_id, bucket_name)

                if in_whitelist:
                    logging.debug(f"Skipping {bucket_name} (in whitelist)")

                    # Adding label with "whitelisted" to jira ticket.
                    jira.add_label(ticket_id=issue.jira_details.ticket,
                                   label=IssueStatus.Whitelisted.value)
                    continue
                # if not in_fixlist:
                #     logging.debug(f"Skipping {bucket_name} (not in fixlist)")
                #     continue

                if issue.timestamps.reported is None:
                    logging.debug(f"Skipping '{bucket_name}' (was not reported)")
                    continue

                if issue.timestamps.remediated is not None:
                    logging.debug(f"Skipping {bucket_name} (has been already remediated)")
                    continue

                updated_date = issue.timestamp_as_datetime
                no_of_days_issue_created = (self.config.now - updated_date).days

                if no_of_days_issue_created >= retention_period:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    try:
                        account = Account(
                            id=account_id,
                            name=account_name,
                            role_name=self.config.aws.role_name_reporting)
                        if account.session is None:
                            continue

                        checker = S3BucketsPolicyChecker(account=account)
                        checker.check(buckets=[bucket_name])

                        s3bucket = checker.get_bucket(bucket_name)
                        if s3bucket is None:
                            logging.debug(f"Bucket {bucket_name} was removed by user")
                        elif not s3bucket.public_by_policy:
                            logging.debug(
                                f"Bucket {s3bucket.name} policy issue was remediated by user")
                        else:
                            if not batch and \
                               not confirm(f"Do you want to remediate '{bucket_name}' S3 bucket policy", False):
                                continue

                            logging.debug(f"Remediating '{s3bucket.name}' policy")

                            backup_path = s3bucket.backup_policy_s3(
                                main_account.client("s3"), backup_bucket)

                            remediation_succeed = True
                            if s3bucket.restrict_policy():
                                comment = (
                                    f"Policy backup was saved to "
                                    f"[{backup_path}|https://s3.console.aws.amazon.com/s3/object/{backup_bucket}/{backup_path}]. "
                                    f"Bucket '{s3bucket.name}' policy issue "
                                    f"in '{account_name} / {account_id}' account "
                                    f"was remediated by hammer")
                            else:
                                remediation_succeed = False
                                comment = (
                                    f"Failed to remediate bucket '{s3bucket.name}' policy issue "
                                    f"in '{account_name} / {account_id}' account "
                                    f"due to some limitations. Please, check manually")

                            jira.remediate_issue(
                                ticket_id=issue.jira_details.ticket,
                                comment=comment,
                                reassign=remediation_succeed,
                            )
                            slack.report_issue(
                                msg=f"{comment}"
                                    f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                                owner=owner,
                                account_id=account_id,
                                bu=bu,
                                product=product,
                            )
                            IssueOperations.set_status_remediated(ddb_table, issue)
                    except Exception:
                        logging.exception(
                            f"Error occurred while updating bucket '{bucket_name}' policy "
                            f"in '{account_name} / {account_id}'")
                else:
                    logging.debug(
                        f"Skipping '{bucket_name}' "
                        f"({retention_period - no_of_days_issue_created} days before remediation)")
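    # The backup_policy_s3() call above persists the original policy before it is restricted.
    # A hypothetical sketch of such a backup step with plain boto3, assuming a simple
    # '<bucket>-policy.json' key layout (the real key format is not shown in this excerpt):
    def _backup_bucket_policy_sketch(self, s3_client, backup_bucket, bucket_name):
        """ Hypothetical helper: copy a bucket's current policy into the backup bucket. """
        policy = s3_client.get_bucket_policy(Bucket=bucket_name)["Policy"]
        backup_key = f"{bucket_name}-policy.json"
        s3_client.put_object(Bucket=backup_bucket, Key=backup_key, Body=policy)
        return backup_key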
    def create_tickets_sqs_policy(self):
        """ Class method to create jira tickets """
        table_name = self.config.sqspolicy.ddb_table_name
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, SQSPolicyIssue)
            for issue in issues:
                queue_url = issue.issue_id
                queue_name = issue.issue_details.name
                queue_region = issue.issue_details.region
                tags = issue.issue_details.tags
                policy = issue.issue_details.policy
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                        logging.debug(
                            f"Closing {issue.status.value} SQS queue '{queue_name}' public policy issue")

                        comment = (
                            f"Closing {issue.status.value} SQS queue '{queue_name}' public policy issue "
                            f"in '{account_name} / {account_id}' account, '{queue_region}' region")
                        if issue.status == IssueStatus.Whitelisted:
                            # Adding label with "whitelisted" to jira ticket.
                            jira.add_label(ticket_id=issue.jira_details.ticket,
                                           label=IssueStatus.Whitelisted.value)
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        slack.report_issue(
                            msg=f"{comment}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        logging.debug(
                            f"Updating SQS queue '{queue_name}' public policy issue")

                        comment = "Issue details are changed, please check again.\n"
                        # Adding new SQS queue policy json as attachment to Jira ticket.
                        attachment = jira.add_attachment(
                            ticket_id=issue.jira_details.ticket,
                            filename=self.attachment_name(account_id, queue_region, queue_name),
                            text=policy)
                        if attachment is not None:
                            comment += f"New policy - [^{attachment.filename}].\n"
                        comment += JiraOperations.build_tags_table(tags)

                        jira.update_issue(ticket_id=issue.jira_details.ticket,
                                          comment=comment)

                        slack.report_issue(
                            msg=f"SQS queue '{queue_name}' public policy issue is changed "
                                f"in '{account_name} / {account_id}' account, '{queue_region}' region"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )

                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{queue_name}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting SQS queue '{queue_name}' public policy issue")

                    owner = tags.get("owner", None)
                    bu = tags.get("bu", None)
                    product = tags.get("product", None)

                    if bu is None:
                        bu = self.config.get_bu_by_name(queue_name)

                    issue_summary = (
                        f"SQS queue '{queue_name}' with public policy "
                        f"in '{account_name} / {account_id}' account, '{queue_region}' region"
                        f"{' [' + bu + ']' if bu else ''}")

                    issue_description = (
                        f"Queue policy allows unrestricted public access.\n\n"
                        f"*Threat*: "
                        f"This creates potential security vulnerabilities by allowing anyone to add, modify, or remove items in an SQS queue.\n\n"
                        f"*Risk*: High\n\n"
                        f"*Account Name*: {account_name}\n"
                        f"*Account ID*: {account_id}\n"
                        f"*SQS queue url*: {queue_url}\n"
                        f"*SQS queue name*: {queue_name}\n"
                        f"*SQS queue region*: {queue_region}\n"
                        f"\n")

                    auto_remediation_date = (
                        self.config.now +
                        self.config.sqspolicy.issue_retention_date).date()
                    issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                    issue_description += JiraOperations.build_tags_table(tags)

                    issue_description += f"\n"

                    issue_description += (
                        f"*Recommendation*: "
                        f"Check if global access is truly needed and "
                        f"if not - update SQS queue policy with "
                        f"an [*IpAddress* condition|https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-customer-managed-policy-examples.html#grant-all-permissions-to-all-users-in-cidr-range] "
                        f"in order to restrict access to specific private IP ranges from [RFC1918|https://tools.ietf.org/html/rfc1918].")

                    if self.config.whitelisting_procedure_url:
                        issue_description += (
                            f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                            f"and provide a strong business reasoning. ")

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["publicsqs"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                    except Exception:
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                        # Adding SQS queue policy json as attachment to Jira ticket.
                        jira.add_attachment(
                            ticket_id=issue.jira_details.ticket,
                            filename=self.attachment_name(account_id, queue_region, queue_name),
                            text=policy)

                        issue.jira_details.owner = owner
                        issue.jira_details.business_unit = bu
                        issue.jira_details.product = product

                        slack.report_issue(
                            msg=f"Discovered {issue_summary}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )

                        IssueOperations.set_status_reported(ddb_table, issue)
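    # The "' (' + jira.ticket_url(...) + ')' if issue.jira_details.ticket else ''" expression
    # is repeated in every Slack message above. A small hypothetical helper like the one below
    # could build that optional link suffix in one place (illustrative refactoring sketch only):
    def _ticket_link_suffix(self, jira, ticket_id):
        """ Hypothetical helper: return ' (<ticket url>)' when a ticket exists, else ''. """
        return f" ({jira.ticket_url(ticket_id)})" if ticket_id else ""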