Ejemplo n.º 1
0
def get_scan_results(request_id):
    """Build an API Gateway response describing the state of a scan request.

    Returns 404 when the request id is unknown. Otherwise returns 200 with a
    scan_status of COMPLETE (all items processed, results attached),
    IN_PROGRESS (last update within the past 5 minutes) or FAILED (stale).
    """
    config = Config()
    main_account = Account(region=config.aws.region)
    api_table = main_account.resource("dynamodb").Table(
        config.api.ddb_table_name)

    request_info = DDB.get_request_data(api_table, request_id)
    if not request_info:
        return {
            "statusCode": 404,
            "body": json.dumps({"message": "Request id has not been found."},
                               indent=4,
                               default=utility.jsonEncoder)
        }

    if request_info['progress'] == request_info['total']:
        body = {
            "scan_status": "COMPLETE",
            "scan_results": collect_results(request_info, main_account)
        }
    elif time.time() - request_info['updated'] <= 300:
        # still being updated recently - assume the scan is alive
        body = {"scan_status": "IN_PROGRESS"}
    else:
        # no progress and no recent updates - consider the scan dead
        body = {"scan_status": "FAILED"}

    return {
        "statusCode": 200,
        "body": json.dumps(body, indent=4, default=utility.jsonEncoder)
    }
Ejemplo n.º 2
0
def lambda_handler(event, context):
    """CloudFormation custom-resource handler that resolves the newest AMI id
    published under PRODUCT_CODE.

    Sends a "SUCCESS" response with {'Id': <ami id>} on success, or "FAILED"
    when the region is missing, the EC2 call fails, or no images are found.
    Stack deletion is acknowledged immediately with "SUCCESS".
    """
    set_logging(level=logging.DEBUG)

    logging.debug(f"Got request\n{jsonDumps(event)}")

    # Nothing to clean up on stack deletion - just acknowledge.
    if event.get('RequestType', "") == "Delete":
        send_response(event, context, "SUCCESS")
        return

    region = event.get('ResourceProperties', {}).get('Region', None)
    if region is None:
        logging.error("Failed to get region from event")
        send_response(event, context, "FAILED")
        # BUG FIX: previously fell through and queried EC2 with region=None
        return

    try:
        account = Account(region=region)
        ec2 = account.client(
            'ec2', config=botocore.config.Config(retries={'max_attempts': 3}))
        images = ec2.describe_images(Filters=[{
            "Name": "product-code",
            "Values": [PRODUCT_CODE]
        }])['Images']
    except Exception:
        logging.exception("Failed to describe images")
        send_response(event, context, "FAILED")
        return

    if len(images) == 0:
        logging.error("No images were found")
        send_response(event, context, "FAILED")
        return

    # Newest image by creation timestamp; ISO-8601 strings sort
    # chronologically, so max() on the string is safe and avoids a full sort.
    latest = max(images, key=itemgetter('CreationDate'))['ImageId']
    logging.info(f"Latest '{PRODUCT_CODE}' AMI id - '{latest}'")
    send_response(event, context, "SUCCESS", {'Id': latest})
Ejemplo n.º 3
0
 def __init__(self):
     """Collect DynamoDB backup settings and set up AWS clients."""
     self.config = Config()
     # feature toggle: whether DDB backups should be taken at all
     self.enabled = self.config.aws.ddb_backup_enabled
     # how long to keep old backups (units not visible here -- TODO confirm days)
     self.retention_period = self.config.aws.ddb_backup_retention
     self.account = Account(region=self.config.aws.region)
     self.ddb_client = self.account.client('dynamodb')
     self.ddb_resource = self.account.resource('dynamodb')
     # timezone-aware "now" so naming/pruning is stable across hosts
     self.now = datetime.now(timezone.utc)
     # used as a part of backup name
     self.today = self.now.strftime("%Y-%m-%d")
Ejemplo n.º 4
0
def pytest_generate_tests(metafunc):
    """
    Entrypoint for tests (built-in pytest function for dynamic generation of test cases).
    """
    # Spin up the EC2 mock with test volumes and snapshots
    mock_ebs.start()
    mock_ebs.create_env_volumes(ebs_volumes, region)
    mock_ebs.create_env_snapshots(ebs_volumes, ebs_snapshots, region)

    account = Account(region=region)

    # First pass: detect public snapshots in the mocked environment
    checker = EBSPublicSnapshotsChecker(account)
    checker.check()

    # Remediate everything that was found ...
    for snapshot in checker.snapshots:
        snapshot.make_private()

    # ... then re-scan to capture the post-remediation state
    checker_remediated = EBSPublicSnapshotsChecker(account)
    checker_remediated.check()

    # One test case per (snapshot, remediated-flag) pair
    cases = [(snapshot, False) for snapshot in checker.snapshots]
    cases.extend((snapshot, True) for snapshot in checker_remediated.snapshots)
    metafunc.parametrize("snapshot,remediated", cases, ids=ident_snapshot_test)
def pytest_generate_tests(metafunc):
    """
    Entrypoint for tests (built-in pytest function for dynamic generation of test cases).
    """
    # Spin up the EC2 mock and create the test volumes
    mock_ebs.start()
    volume_ids = mock_ebs.create_env_volumes(ebs_volumes, region)

    account = Account(region=region)

    # Scan only the volumes created above for missing encryption
    checker = EBSUnencryptedVolumesChecker(account)
    checker.check(ids=volume_ids)

    # One test case per detected volume
    metafunc.parametrize("volume", checker.volumes, ids=ident_volume_test)
Ejemplo n.º 6
0
def pytest_generate_tests(metafunc):
    """
    Entrypoint for tests (built-in pytest function for dynamic generation of test cases).
    """
    # Spin up the EC2 mock with the test security groups
    mock_ec2.start()
    mock_ec2.create_env(secgroups, region)

    account = Account(region=region)

    # First scan: find groups exposing restricted ports
    checker = SecurityGroupsChecker(account, restricted_ports=restricted_ports)
    checker.check()
    # Remediate every offending group
    for secgroup in checker.groups:
        secgroup.restrict()

    # Second scan: capture the post-remediation state
    checker_remediated = SecurityGroupsChecker(
        account, restricted_ports=restricted_ports)
    checker_remediated.check()

    # One test case per (group, remediated-flag) pair
    cases = [(group, False) for group in checker.groups]
    cases.extend((group, True) for group in checker_remediated.groups)
    metafunc.parametrize("group,remediated", cases, ids=ident_test)
Ejemplo n.º 7
0
def pytest_generate_tests(metafunc):
    """
    Entrypoint for tests (built-in pytest function for dynamic generation of test cases).
    """
    # Spin up the IAM mock with the test users
    mock_iam.start()
    mock_iam.create_env(users)

    account = Account()

    # Scan mocked users for access keys violating the rotation criteria
    checker = IAMKeyChecker(account,
                            now=now,
                            rotation_criteria_days=rotation_criteria_days)
    checker.check(last_used_check_enabled=False)

    # Flatten every user's keys into a single list of test cases
    all_keys = [key for user in checker.users for key in user.keys]

    # create test cases for each key
    metafunc.parametrize("key", all_keys, ids=ident_test)
Ejemplo n.º 8
0
def pytest_generate_tests(metafunc):
    """
    Entrypoint for tests (built-in pytest function for dynamic generation of test cases).
    """
    # Spin up the S3 mock with the test buckets
    mock_s3.start()
    mock_s3.create_env(buckets)

    account = Account()

    # First scan: find buckets with open ACLs
    checker = S3BucketsAclChecker(account)
    checker.check()

    # Remediate every offending bucket ...
    for bucket in checker.buckets:
        bucket.restrict_acl()

    # ... then re-scan to capture the post-remediation state
    checker_remediated = S3BucketsAclChecker(account)
    checker_remediated.check()

    # One test case per (bucket, remediated-flag) pair
    cases = [(bucket, False) for bucket in checker.buckets]
    cases.extend((bucket, True) for bucket in checker_remediated.buckets)

    metafunc.parametrize("bucket,remediated", cases, ids=ident_test)
Ejemplo n.º 9
0
def pytest_generate_tests(metafunc):
    """
    Entrypoint for tests (built-in pytest function for dynamic generation of test cases).
    """
    # Spin up the SQS mock with the test queues
    mock_sqs.start()
    mock_sqs.create_env(queues, region)

    account = Account(region=region)

    # First scan: find queues with overly-permissive policies
    checker = SQSPolicyChecker(account)
    checker.check()

    # Remediate every offending queue ...
    for queue in checker.queues:
        queue.restrict_policy()

    # ... then re-scan to capture the post-remediation state
    checker_remediated = SQSPolicyChecker(account)
    checker_remediated.check()

    # One test case per (queue, remediated-flag) pair
    cases = [(queue, False) for queue in checker.queues]
    cases.extend((queue, True) for queue in checker_remediated.queues)

    # create test cases for each response
    metafunc.parametrize("queue,remediated", cases, ids=ident_test)
Ejemplo n.º 10
0
    def create_jira_ticket(self):
        """Report inactive IAM access key issues to Jira and Slack.

        Walks all not-closed IAMKeyInactiveIssue records per configured
        account: closes tickets for issues that became Resolved/Whitelisted,
        and opens a new ticket (plus Slack message) for issues not yet
        reported. Issue state transitions are persisted back to DynamoDB.
        """
        table_name = self.config.iamUserInactiveKeys.ddb_table_name

        # Main (hammer) account owns the DynamoDB table with issue state
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.iamUserInactiveKeys.accounts.items(
        ):
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, IAMKeyInactiveIssue)
            for issue in issues:
                key_id = issue.issue_id
                username = issue.issue_details.username
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    if issue.status in [
                            IssueStatus.Resolved, IssueStatus.Whitelisted
                    ]:
                        logging.debug(
                            f"Closing {issue.status.value} inactive access key '{key_id} / {username}' issue"
                        )

                        comment = (
                            f"Closing {issue.status.value} inactive access key '{key_id} / {username}' issue "
                            f"in '{account_name} / {account_id}' account")
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        # mirror the closure to Slack, linking the ticket if one exists
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            account_id=account_id,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    else:
                        # reported and still open - nothing to do this run
                        logging.debug(
                            f"No changes for '{key_id} / {username}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting inactive access key '{key_id} / {username}' issue"
                    )

                    issue_summary = (
                        f"IAM access key '{key_id}' for '{username}' has not been used "
                        f"for {self.config.iamUserInactiveKeys.inactive_criteria_days.days} days "
                        f"in '{account_name} / {account_id}' account")

                    # normalize stored timestamps to naive "YYYY-MM-DD HH:MM" for display
                    create_date = dateutil.parser.parse(
                        issue.issue_details.create_date).replace(
                            tzinfo=None).isoformat(' ', 'minutes')
                    last_used = dateutil.parser.parse(
                        issue.issue_details.last_used).replace(
                            tzinfo=None).isoformat(' ', 'minutes')
                    # Jira wiki-markup description body
                    issue_description = (
                        f"IAM access key has not been used for {self.config.iamUserInactiveKeys.inactive_criteria_days.days} days.\n\n"
                        f"*Risk*: Low\n\n"
                        f"*Account Name*: {account_name}\n"
                        f"*Account ID*: {account_id}\n"
                        f"*User Name*: {username}\n"
                        f"*Key ID*: {key_id}\n"
                        f"*Key created*: {create_date}\n"
                        f"*Key last used*: {last_used}\n"
                        f"\n")

                    # date after which auto-remediation will kick in
                    auto_remediation_date = (
                        self.config.now +
                        self.config.iamUserInactiveKeys.issue_retention_date
                    ).date()
                    issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                    issue_description += f"*Recommendation*: Deactivate specified inactive user access key. "

                    if self.config.whitelisting_procedure_url:
                        issue_description += (
                            f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                            f"and provide a strong business reasoning. ")

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["inactive-iam-keys"],
                            account_id=account_id,
                        )
                    except Exception:
                        # ticket creation failed - skip this issue, retry next run
                        logging.exception("Failed to create jira ticket")
                        continue

                    # NOTE(review): response appears to be None when Jira
                    # reporting is disabled -- confirm against JiraReporting
                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        account_id=account_id,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
Ejemplo n.º 11
0
def lambda_handler(event, context):
    """Lambda handler to evaluate public AMI issues.

    Triggered by an SNS message carrying an account id/name and a list of
    regions. Processes one region per invocation (popping it from the list),
    updates PublicAMIIssue records in DynamoDB, then re-publishes the payload
    to SNS to chain processing of the remaining regions.
    """
    set_logging(level=logging.INFO)

    try:
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        # get the last region from the list to process
        region = payload['regions'].pop()
        # if request_id is present in payload then this lambda was called from the API
        request_id = payload.get('request_id', None)
    except Exception:
        # malformed event - nothing sensible to chain, bail out
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        # main (hammer) account owns the DynamoDB tables with issue state
        main_account = Account(region=config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            config.publicAMIs.ddb_table_name)

        # assume the identification role in the account under check
        account = Account(id=account_id,
                          name=account_name,
                          region=region,
                          role_name=config.aws.role_name_identification)
        if account.session is None:
            # role assumption failed - skip this account/region silently
            return

        logging.debug(f"Checking for Public AMI issues for {account}")

        # existing open issues for account to check if resolved
        open_issues = IssueOperations.get_account_open_issues(
            ddb_table, account_id, PublicAMIIssue)
        # make dictionary for fast search by id
        # and filter by current region
        open_issues = {
            issue.issue_id: issue
            for issue in open_issues if issue.issue_details.region == region
        }
        logging.debug(f"Public AMIs in DDB:\n{open_issues.keys()}")

        checker = PublicAMIChecker(account=account)
        if checker.check():
            for ami in checker.amis:
                logging.debug(f"Checking {ami.id}")
                if ami.public_access:
                    issue = PublicAMIIssue(account_id, ami.id)
                    issue.issue_details.tags = ami.tags
                    issue.issue_details.name = ami.name
                    issue.issue_details.region = region
                    # whitelisted AMIs are tracked but not treated as open
                    if config.publicAMIs.in_whitelist(account_id, ami.id):
                        issue.status = IssueStatus.Whitelisted
                    else:
                        issue.status = IssueStatus.Open
                    # NOTE(review): "{ami.id}/{ami.id}" repeats the id; the
                    # second field was probably meant to be ami.name -- confirm
                    logging.debug(
                        f"Setting {ami.id}/{ami.id} status {issue.status}")
                    IssueOperations.update(ddb_table, issue)
                    # remove issue id from issues_list_from_db (if exists)
                    # as we already checked it
                    open_issues.pop(ami.id, None)

            logging.debug(f"Public AMIs in DDB:\n{open_issues.keys()}")
            # all other unresolved issues in DDB are for removed/remediated keys
            for issue in open_issues.values():
                IssueOperations.set_status_resolved(ddb_table, issue)
        # track the progress of API request to scan specific account/region/feature
        if request_id:
            api_table = main_account.resource("dynamodb").Table(
                config.api.ddb_table_name)
            DDB.track_progress(api_table, request_id)
    except Exception:
        logging.exception(
            f"Failed to check AMI public access for '{account_id} ({account_name})'"
        )
        # on failure the region chain is NOT continued (early return)
        return

    # push SNS messages until the list with regions to check is empty
    if len(payload['regions']) > 0:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            logging.exception("Failed to chain public AMI checking")

    logging.debug(
        f"Checked AMI public access for '{account_id} ({account_name})'")
Ejemplo n.º 12
0
def lambda_handler(event, context):
    """Lambda handler to evaluate public EBS snapshots.

    Triggered by an SNS message carrying an account id/name and a list of
    regions. Processes one region per invocation (popping it from the list),
    updates EBSPublicSnapshotIssue records in DynamoDB, then re-publishes the
    payload to SNS to chain processing of the remaining regions.
    """
    set_logging(level=logging.INFO)

    try:
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        # get the last region from the list to process
        region = payload['regions'].pop()
    except Exception:
        # malformed event - nothing sensible to chain, bail out
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        # main (hammer) account owns the DynamoDB table with issue state
        main_account = Account(region=config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            config.ebsSnapshot.ddb_table_name)

        # assume the identification role in the account under check
        account = Account(id=account_id,
                          name=account_name,
                          region=region,
                          role_name=config.aws.role_name_identification)
        if account.session is None:
            # role assumption failed - skip this account/region silently
            return

        logging.debug(f"Checking for public EBS snapshots in {account}")

        # existing open issues for account to check if resolved
        open_issues = IssueOperations.get_account_open_issues(
            ddb_table, account_id, EBSPublicSnapshotIssue)
        # make dictionary for fast search by id
        # and filter by current region
        open_issues = {
            issue.issue_id: issue
            for issue in open_issues if issue.issue_details.region == region
        }
        logging.debug(f"Public EBS snapshots in DDB:\n{open_issues.keys()}")

        checker = EBSPublicSnapshotsChecker(account=account)
        if checker.check():
            for snapshot in checker.snapshots:
                if snapshot.public:
                    issue = EBSPublicSnapshotIssue(account_id, snapshot.id)
                    issue.issue_details.region = snapshot.account.region
                    issue.issue_details.volume_id = snapshot.volume_id
                    issue.issue_details.tags = snapshot.tags
                    # whitelisted snapshots are tracked but not treated as open
                    if config.ebsSnapshot.in_whitelist(account_id,
                                                       snapshot.id):
                        issue.status = IssueStatus.Whitelisted
                    else:
                        issue.status = IssueStatus.Open
                    logging.debug(
                        f"Setting {snapshot.id} status {issue.status}")
                    IssueOperations.update(ddb_table, issue)
                    # remove issue id from issues_list_from_db (if exists)
                    # as we already checked it
                    open_issues.pop(snapshot.id, None)

            logging.debug(
                f"Public EBS snapshots in DDB:\n{open_issues.keys()}")
            # all other unresolved issues in DDB are for removed/remediated EBS snapshots
            for issue in open_issues.values():
                IssueOperations.set_status_resolved(ddb_table, issue)
    except Exception:
        # NOTE(review): unlike the public-AMI handler there is no `return`
        # here, so the region chain continues even after a failure -- confirm
        # this asymmetry is intentional
        logging.exception(
            f"Failed to check public EBS snapshots in '{region}' for '{account_id} ({account_name})'"
        )

    # push SNS messages until the list with regions to check is empty
    if len(payload['regions']) > 0:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            logging.exception("Failed to chain public EBS snapshots checking")

    logging.debug(
        f"Checked public EBS snapshots in '{region}' for '{account_id} ({account_name})'"
    )
    def create_tickets_rds_public_snapshots(self):
        """Report RDS public snapshot issues to Jira and Slack.

        Walks all not-closed RdsPublicSnapshotIssue records per configured
        account: closes tickets for Resolved/Whitelisted issues, flags issues
        whose details changed after reporting, and opens a new ticket (plus
        Slack message) for issues not yet reported. State transitions are
        persisted back to DynamoDB.
        """
        table_name = self.config.rdsSnapshot.ddb_table_name

        # Main (hammer) account owns the DynamoDB table with issue state
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, RdsPublicSnapshotIssue)
            for issue in issues:
                snapshot_id = issue.issue_id
                region = issue.issue_details.region
                tags = issue.issue_details.tags
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    # routing metadata captured when the ticket was first filed
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [
                            IssueStatus.Resolved, IssueStatus.Whitelisted
                    ]:
                        logging.debug(
                            f"Closing {issue.status.value} RDS public snapshot '{snapshot_id}' issue"
                        )

                        comment = (
                            f"Closing {issue.status.value} RDS public snapshot '{snapshot_id}' issue "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                        )
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        # mirror the closure to Slack, linking the ticket if one exists
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        # details changed after reporting; ticket update not implemented yet
                        logging.error(
                            f"TODO: update jira ticket with new data: {table_name}, {account_id}, {snapshot_id}"
                        )
                        slack.report_issue(
                            msg=
                            f"RDS public snapshot '{snapshot_id}' issue is changed "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        # reported and unchanged - nothing to do this run
                        logging.debug(f"No changes for '{snapshot_id}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting RDS public snapshot '{snapshot_id}' issue")

                    # routing metadata comes from resource tags on first report
                    owner = tags.get("owner", None)
                    bu = tags.get("bu", None)
                    product = tags.get("product", None)

                    issue_summary = (
                        f"RDS public snapshot '{snapshot_id}'"
                        f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}"
                    )

                    # Jira wiki-markup description body
                    issue_description = (
                        f"The RDS snapshot is marked as public.\n\n"
                        f"*Risk*: High\n\n"
                        f"*Account Name*: {account_name}\n"
                        f"*Account ID*: {account_id}\n"
                        f"*Region*: {region}\n"
                        f"*RDS Snapshot ID*: {snapshot_id}\n")

                    # date after which auto-remediation will kick in
                    auto_remediation_date = (
                        self.config.now +
                        self.config.rdsSnapshot.issue_retention_date).date()
                    issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                    issue_description += JiraOperations.build_tags_table(tags)

                    issue_description += "\n"
                    issue_description += (
                        f"*Recommendation*: "
                        f"Unless you are certain you want to share all the data in the snapshot with "
                        f"all AWS accounts and users, modify the permissions: mark the snapshot as private, "
                        f"and then specify the accounts that you want to give permissions to."
                    )

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["rds-public-snapshots"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                    except Exception:
                        # ticket creation failed - skip this issue, retry next run
                        logging.exception("Failed to create jira ticket")
                        continue

                    # NOTE(review): response appears to be None when Jira
                    # reporting is disabled -- confirm against JiraReporting
                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                    # persist routing metadata for future close/update messages
                    issue.jira_details.owner = owner
                    issue.jira_details.business_unit = bu
                    issue.jira_details.product = product

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner,
                        account_id=account_id,
                        bu=bu,
                        product=product,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
Ejemplo n.º 14
0
    def clean_iam_access_keys(self, batch=False):
        """Remediate reported inactive IAM user access keys past retention.

        Walks open IAMKeyInactiveIssue records per configured account and
        disables keys whose issue has been reported and has aged past the
        remediation retention period. Whitelisted users/keys are skipped.
        Jira ticket, Slack message and DynamoDB status are updated per key.

        :param batch: when True, remediate without interactive confirmation.
        """
        # BUG FIX: was bare `config.aws.region` - the rest of this method
        # consistently uses `self.config`
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            self.config.iamUserInactiveKeys.ddb_table_name)

        # days an issue must exist (since reporting) before auto-remediation
        retention_period = self.config.iamUserInactiveKeys.remediation_retention_period

        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug("* Account Name:" + account_name +
                          " :::Account ID:::" + account_id)
            issues = IssueOperations.get_account_open_issues(
                ddb_table, account_id, IAMKeyInactiveIssue)
            for issue in issues:
                key_id = issue.issue_id
                username = issue.issue_details.username

                # the whitelist can match either the user name or the key id
                user_in_whitelist = self.config.iamUserInactiveKeys.in_whitelist(
                    account_id, username)
                key_in_whitelist = self.config.iamUserInactiveKeys.in_whitelist(
                    account_id, key_id)

                if user_in_whitelist or key_in_whitelist:
                    logging.debug(
                        f"Skipping '{key_id} / {username}' (in whitelist)")
                    continue

                if issue.timestamps.reported is None:
                    logging.debug(
                        f"Skipping '{key_id} / {username}' (was not reported)")
                    continue

                if issue.timestamps.remediated is not None:
                    logging.debug(
                        f"Skipping '{key_id} / {username}' (has been already remediated)"
                    )
                    continue

                updated_date = issue.timestamp_as_datetime
                no_of_days_issue_created = (self.config.now -
                                            updated_date).days

                if no_of_days_issue_created >= retention_period:
                    try:
                        # interactive mode asks for confirmation per key
                        if not batch and \
                           not confirm(f"Do you want to remediate inactive access key '{key_id} / {username}'", False):
                            continue

                        # assume the reporting role in the target account
                        account = Account(
                            id=account_id,
                            name=account_name,
                            role_name=self.config.aws.role_name_reporting)
                        if account.session is None:
                            continue

                        logging.debug(
                            f"Remediating inactive access key '{key_id} / {username}'"
                        )
                        remediation_succeed = True
                        try:
                            IAMOperations.disable_access_key(
                                account.client("iam"), username, key_id)
                            comment = (
                                f"Inactive access key '{key_id} / {username}' issue "
                                f"in '{account_name} / {account_id}' account "
                                f"was remediated by hammer")
                        except Exception:
                            remediation_succeed = False
                            # BUG FIX: message was missing the f-prefix, so
                            # placeholders were logged literally
                            logging.exception(
                                f"Failed to disable '{key_id} / {username}' inactive access key"
                            )
                            comment = (
                                f"Failed to remediate inactive access key '{key_id} / {username}' issue "
                                f"in '{account_name} / {account_id}' account "
                                f"due to some limitations. Please, check manually"
                            )

                        # reflect the (possibly failed) remediation in Jira/Slack
                        jira.remediate_issue(
                            ticket_id=issue.jira_details.ticket,
                            comment=comment,
                            reassign=remediation_succeed,
                        )
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            account_id=account_id,
                        )
                        IssueOperations.set_status_remediated(ddb_table, issue)
                    except Exception:
                        logging.exception(
                            f"Error occurred while disabling '{username} / {key_id}' "
                            f"in '{account_name} / {account_id}'")
                else:
                    logging.debug(
                        f"Skipping '{key_id} / {username}' "
                        f"({retention_period - no_of_days_issue_created} days before remediation)"
                    )
Ejemplo n.º 15
0
    def clean_public_rds_snapshots(self, batch=False):
        """ Class method to remediate public rds snapshot.

        Walks every account configured for RDS snapshot remediation, loads open
        RdsPublicSnapshotIssue records from DynamoDB and, once an issue has
        been open longer than the configured retention period, makes the
        snapshot private, comments/reassigns the Jira ticket and posts to Slack.

        :param batch: when True, remediate without interactive confirmation
        """
        # NOTE(review): was `config.aws.region` (bare module name) while the
        # rest of this method uses self.config - unified on self.config.
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            self.config.rdsSnapshot.ddb_table_name)

        # days an issue may stay open before auto-remediation kicks in
        retention_period = self.config.rdsSnapshot.remediation_retention_period

        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.rdsSnapshot.remediation_accounts.items(
        ):
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_open_issues(
                ddb_table, account_id, RdsPublicSnapshotIssue)
            for issue in issues:
                if issue.timestamps.remediated is not None:
                    logging.debug(
                        f"Skipping '{issue.issue_id}' (has been already remediated)"
                    )
                    continue

                in_whitelist = self.config.rdsSnapshot.in_whitelist(
                    account_id, issue.issue_id)
                if in_whitelist:
                    logging.debug(
                        f"Skipping '{issue.issue_id}' (in whitelist)")

                    # Adding label with "whitelisted" to jira ticket.
                    jira.add_label(ticket_id=issue.jira_details.ticket,
                                   label=IssueStatus.Whitelisted.value)
                    continue

                if issue.timestamps.reported is None:
                    logging.debug(
                        f"Skipping '{issue.issue_id}' (was not reported)")
                    continue

                updated_date = issue.timestamp_as_datetime
                no_of_days_issue_created = (self.config.now -
                                            updated_date).days

                if no_of_days_issue_created >= retention_period:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    try:
                        if not batch and \
                           not confirm(f"Do you want to remediate public RDS snapshot '{issue.issue_id}'", False):
                            continue

                        account = Account(
                            id=account_id,
                            name=account_name,
                            region=issue.issue_details.region,
                            role_name=self.config.aws.role_name_reporting)
                        if account.session is None:
                            continue

                        remediation_succeed = True
                        try:
                            RdsSnapshotOperations.make_private(
                                account.client("rds"),
                                issue.issue_details.engine,
                                issue.issue_details.name)
                            comment = (
                                f"RDS public snapshot '{issue.issue_id}' issue "
                                f"in '{account_name} / {account_id}' account, '{issue.issue_details.region}' region "
                                f"was remediated by hammer")
                        except Exception:
                            remediation_succeed = False
                            logging.exception(
                                f"Failed to make private '{issue.issue_id}' RDS public snapshot"
                            )
                            comment = (
                                f"Failed to remediate RDS public snapshot '{issue.issue_id}' issue "
                                f"in '{account_name} / {account_id}' account, '{issue.issue_details.region}' region "
                                f"due to some limitations. Please, check manually"
                            )

                        jira.remediate_issue(
                            ticket_id=issue.jira_details.ticket,
                            comment=comment,
                            reassign=remediation_succeed,
                        )
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_remediated(ddb_table, issue)
                    except Exception:
                        logging.exception(
                            f"Error occurred while updating RDS snapshot {issue.issue_id} "
                            f"in {account_id}/{issue.issue_details.region}")
    def create_tickets_s3buckets(self):
        """ Class method to create jira tickets.

        For every account, walks not-closed S3PolicyIssue records and:
        - closes resolved/whitelisted issues (Jira + Slack + DDB status),
        - updates tickets whose issue details changed since last report,
        - creates new tickets (with the bucket policy attached) for
          issues that were never reported.
        """
        table_name = self.config.s3policy.ddb_table_name

        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, S3PolicyIssue)
            for issue in issues:
                bucket_name = issue.issue_id
                tags = issue.issue_details.tags
                policy = issue.issue_details.policy
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    owner = issue.issue_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [
                            IssueStatus.Resolved, IssueStatus.Whitelisted
                    ]:
                        logging.debug(
                            f"Closing {issue.status.value} S3 bucket '{bucket_name}' public policy issue"
                        )

                        comment = (
                            f"Closing {issue.status.value} S3 bucket '{bucket_name}' public policy "
                            f"in '{account_name} / {account_id}' account ")
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        logging.debug(
                            f"Updating S3 bucket '{bucket_name}' public policy issue"
                        )

                        comment = "Issue details are changed, please check again.\n"
                        # Adding new bucket policy json as attachment to Jira ticket.
                        attachment = jira.add_attachment(
                            ticket_id=issue.jira_details.ticket,
                            filename=self.attachment_name(
                                account_id, bucket_name),
                            text=policy)
                        if attachment is not None:
                            comment += f"New policy - [^{attachment.filename}].\n"
                        comment += JiraOperations.build_tags_table(tags)
                        jira.update_issue(ticket_id=issue.jira_details.ticket,
                                          comment=comment)
                        slack.report_issue(
                            msg=
                            # fixed typo: was "pubic policy"
                            f"S3 bucket '{bucket_name}' public policy issue is changed "
                            f"in '{account_name} / {account_id}' account"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{bucket_name}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting S3 bucket '{bucket_name}' public policy issue"
                    )

                    owner = tags.get("owner", None)
                    bu = tags.get("bu", None)
                    product = tags.get("product", None)

                    if bu is None:
                        bu = self.config.get_bu_by_name(bucket_name)

                    issue_summary = (
                        f"S3 bucket '{bucket_name}' with public policy "
                        f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}"
                    )

                    issue_description = (
                        f"Bucket policy allows unrestricted public access.\n\n"
                        f"*Threat*: "
                        f"This creates potential security vulnerabilities by allowing anyone to add, modify, or remove items in a bucket.\n\n"
                        f"*Risk*: High\n\n"
                        f"*Account Name*: {account_name}\n"
                        f"*Account ID*: {account_id}\n"
                        f"*S3 Bucket name*: {bucket_name}\n"
                        f"*Bucket Owner*: {owner}\n"
                        f"\n")

                    auto_remediation_date = (
                        self.config.now +
                        self.config.s3policy.issue_retention_date).date()
                    issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                    issue_description += JiraOperations.build_tags_table(tags)

                    issue_description += "\n"
                    issue_description += (
                        f"*Recommendation*: "
                        f"Check if global access is truly needed and "
                        # fixed RFC number: RFC1918 defines private IP ranges (was RFC1819)
                        f"if not - update bucket permissions to restrict access to specific private IP ranges from RFC1918."
                    )

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["publics3"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                    except Exception:
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id
                        # Adding bucket policy json as attachment to Jira ticket.
                        jira.add_attachment(
                            ticket_id=issue.jira_details.ticket,
                            filename=self.attachment_name(
                                account_id, bucket_name),
                            text=policy)

                    issue.jira_details.owner = owner
                    issue.jira_details.business_unit = bu
                    issue.jira_details.product = product

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner,
                        account_id=account_id,
                        bu=bu,
                        product=product,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
Ejemplo n.º 17
0
def lambda_handler(event, context):
    """ Lambda handler to evaluate iam user keys rotation """
    set_logging(level=logging.INFO)

    # Pull the target account out of the SNS envelope; abort on malformed input.
    try:
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        hammer_account = Account(region=config.aws.region)
        issues_table = hammer_account.resource("dynamodb").Table(
            config.iamUserKeysRotation.ddb_table_name)

        target = Account(id=account_id,
                         name=account_name,
                         role_name=config.aws.role_name_identification)
        if target.session is None:
            return

        logging.debug(f"Checking for IAM user keys rotation for {target}")

        # Index the currently open issues by key id so that issues which are
        # no longer detected can be marked as resolved afterwards.
        known_issues = {
            item.issue_id: item
            for item in IssueOperations.get_account_open_issues(
                issues_table, account_id, IAMKeyRotationIssue)
        }
        logging.debug(
            f"Users with keys to rotate in DDB:\n{known_issues.keys()}")

        checker = IAMKeyChecker(
            account=target,
            now=config.now,
            rotation_criteria_days=config.iamUserKeysRotation.
            rotation_criteria_days)
        if not checker.check(last_used_check_enabled=False):
            return

        for user in checker.users:
            for key in user.stale_keys:
                issue = IAMKeyRotationIssue(account_id, key.id)
                issue.issue_details.username = user.id
                issue.issue_details.create_date = key.create_date.isoformat()
                whitelisted = (
                    config.iamUserKeysRotation.in_whitelist(account_id, key.id)
                    or config.iamUserKeysRotation.in_whitelist(
                        account_id, user.id))
                issue.status = (IssueStatus.Whitelisted
                                if whitelisted else IssueStatus.Open)
                logging.debug(
                    f"Setting {key.id}/{user.id} status {issue.status}")
                IssueOperations.update(issues_table, issue)
                # still present on the account - not a candidate for resolution
                known_issues.pop(key.id, None)

        logging.debug(f"Keys to rotate in DDB:\n{known_issues.keys()}")
        # whatever remains was removed/remediated outside hammer - resolve it
        for leftover in known_issues.values():
            IssueOperations.set_status_resolved(issues_table, leftover)
    except Exception:
        logging.exception(
            f"Failed to check IAM user keys rotation for '{account_id} ({account_name})'"
        )
        return

    logging.debug(
        f"Checked IAM user keys rotation for '{account_id} ({account_name})'")
Ejemplo n.º 18
0
def lambda_handler(event, context):
    """ Lambda Handler to describe cloud trails enabled or not for each region """
    set_logging(level=logging.INFO)

    # Decode the SNS message and take the next region off the work list.
    try:
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        region = payload['regions'].pop()
        # request_id is only present when this lambda was triggered via the API
        request_id = payload.get('request_id', None)
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        hammer_account = Account(region=config.aws.region)
        issues_table = hammer_account.resource("dynamodb").Table(
            config.cloudtrails.ddb_table_name)

        target = Account(id=account_id,
                         name=account_name,
                         region=region,
                         role_name=config.aws.role_name_identification)
        if target.session is None:
            return

        logging.debug(f"Checking for CloudTrail logging issues in {target}")

        # Existing open issues for this account, narrowed to the region being
        # scanned and keyed by issue id for fast lookup.
        known_issues = {}
        for existing in IssueOperations.get_account_open_issues(
                issues_table, account_id, CloudTrailIssue):
            if existing.issue_id == region:
                known_issues[existing.issue_id] = existing
        logging.debug(f"CloudTrail region in DDB:\n{known_issues.keys()}")

        checker = CloudTrailChecker(account=target)
        if checker.check():
            if checker.disabled or checker.delivery_errors:
                issue = CloudTrailIssue(account_id, region)
                issue.issue_details.disabled = checker.disabled
                issue.issue_details.delivery_errors = checker.delivery_errors
                issue.add_trails(checker.trails)
                whitelisted = config.cloudtrails.in_whitelist(
                    account_id, region)
                issue.status = (IssueStatus.Whitelisted
                                if whitelisted else IssueStatus.Open)
                logging.debug(f"Setting {region} status {issue.status}")
                IssueOperations.update(issues_table, issue)
            elif region in known_issues:
                # previously recorded problem is no longer detected - close it
                IssueOperations.set_status_resolved(issues_table,
                                                    known_issues[region])
        # track the progress of API request to scan specific account/region/feature
        if request_id:
            api_table = hammer_account.resource("dynamodb").Table(
                config.api.ddb_table_name)
            DDB.track_progress(api_table, request_id)
    except Exception:
        logging.exception(
            f"Failed to check CloudTrail in '{region}' for '{account_id} ({account_name})'"
        )

    # keep chaining SNS messages while there are still regions to scan
    if payload['regions']:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            logging.exception("Failed to chain CloudTrail checking")

    logging.debug(
        f"Checked CloudTrail in '{region}' for '{account_id} ({account_name})'"
    )
Ejemplo n.º 19
0
    def cleans3bucketunencrypted(self, batch=False):
        """ Class method to clean S3 buckets which are violating aws best practices.

        Enables default encryption on S3 buckets with open S3EncryptionIssue
        records once the issue is older than the remediation retention period,
        then comments/reassigns the Jira ticket and posts to Slack.

        :param batch: when True, remediate without interactive confirmation
        """
        # NOTE(review): was `config.aws.region` (bare module name) while the
        # next line uses self.config - unified on self.config.
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(self.config.s3Encrypt.ddb_table_name)

        retention_period = self.config.s3Encrypt.remediation_retention_period

        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_open_issues(ddb_table, account_id, S3EncryptionIssue)
            for issue in issues:
                bucket_name = issue.issue_id

                # removed dead `in_fixlist = True` flag and its unreachable skip branch
                if self.config.s3Encrypt.in_whitelist(account_id, bucket_name):
                    logging.debug(f"Skipping {bucket_name} (in whitelist)")
                    continue

                if issue.timestamps.reported is None:
                    logging.debug(f"Skipping '{bucket_name}' (was not reported)")
                    continue

                if issue.timestamps.remediated is not None:
                    logging.debug(f"Skipping {bucket_name} (has been already remediated)")
                    continue

                updated_date = issue.timestamp_as_datetime
                no_of_days_issue_created = (self.config.now - updated_date).days

                if no_of_days_issue_created >= retention_period:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    try:
                        if not batch and \
                           not confirm(f"Do you want to remediate '{bucket_name}' S3 bucket unencrypted", False):
                            continue

                        account = Account(id=account_id,
                                          name=account_name,
                                          role_name=self.config.aws.role_name_reporting)
                        if account.session is None:
                            continue

                        # re-check the live bucket state before touching it
                        checker = S3EncryptionChecker(account=account)
                        checker.check(buckets=[bucket_name])
                        s3bucket = checker.get_bucket(bucket_name)

                        if s3bucket is None:
                            # bug fix: previous code logged s3bucket.name here,
                            # raising AttributeError on None
                            logging.debug(f"Bucket {bucket_name} was removed by user")
                        elif s3bucket.encrypted:
                            logging.debug(f"Bucket {s3bucket.name} unencrypted issue was remediated by user")
                        else:
                            logging.debug(f"Remediating '{s3bucket.name}' unencrypted")
                            remediation_succeed = True
                            if s3bucket.encrypt_bucket():
                                comment = (f"Bucket '{s3bucket.name}' unencrypted issue "
                                           f"in '{account_name} / {account_id}' account "
                                           f"was remediated by hammer")
                            else:
                                remediation_succeed = False
                                comment = (f"Failed to remediate bucket '{s3bucket.name}' unencrypted issue "
                                           f"in '{account_name} / {account_id}' account "
                                           f"due to some limitations. Please, check manually")

                            jira.remediate_issue(
                                ticket_id=issue.jira_details.ticket,
                                comment=comment,
                                reassign=remediation_succeed,
                            )
                            slack.report_issue(
                                msg=f"{comment}"
                                    f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                                owner=owner,
                                account_id=account_id,
                                bu=bu, product=product,
                            )
                            IssueOperations.set_status_remediated(ddb_table, issue)
                    except Exception:
                        logging.exception(f"Error occurred while updating bucket '{bucket_name}' unencrypted "
                                          f"in '{account_name} / {account_id}'")
                else:
                    logging.debug(f"Skipping '{bucket_name}' "
                                  f"({retention_period - no_of_days_issue_created} days before remediation)")
Ejemplo n.º 20
0
def start_scan(account_id, regions, security_features, tags, ids):
    """ Validate scan parameters, register the request in DynamoDB and
    publish one SNS message per security feature to start the scan.

    :param account_id: AWS account id to scan (required)
    :param regions: regions to scan; empty list means all configured regions
    :param security_features: feature names to scan; empty list means all
    :param tags: optional dict of tags to filter by
    :param ids: optional list of resource ids to filter by
    :return: API Gateway-style dict with statusCode/body; body carries the
             generated request_id used to poll for results
    """
    config = Config()

    # reject a missing account id before any lookup that depends on it
    if not account_id:
        return bad_request(text="account_id is required parameter")

    account_name = config.aws.accounts.get(account_id, None)
    if account_name is None:
        return bad_request(text=f"account '{account_id}' is not defined")

    valid_security_features = [module.section for module in config.modules]
    for security_feature in security_features:
        if security_feature not in valid_security_features:
            return bad_request(
                text=
                f"wrong security feature - '{security_feature}', available choices - {valid_security_features}"
            )

    # empty list means all supported security features
    if not security_features:
        security_features = valid_security_features

    all_regions = config.aws.regions

    for region in regions:
        if region not in all_regions:
            # fixed message: closing quote after the region name was missing
            return bad_request(text=f"Region '{region}' is not supported")
    # empty list means we want to scan all supported regions
    if not regions:
        regions = all_regions

    if ids is not None and not isinstance(ids, list):
        return bad_request(text="'ids' parameter must be list")

    if tags is not None and not isinstance(tags, dict):
        return bad_request(text="'tags' parameter must be dict")

    main_account = Account(region=config.aws.region)
    api_table = main_account.resource("dynamodb").Table(
        config.api.ddb_table_name)

    # only features that are enabled for this account get scanned
    to_scan = []
    for security_feature in security_features:
        accounts = config.get_module_config_by_name(security_feature).accounts
        if account_id in accounts:
            to_scan.append(security_feature)

    # regional features run once per region, global ones once per account
    regional_services = set(to_scan) - set(GLOBAL_SECURITY_FEATURES)
    global_services = set(to_scan).intersection(set(GLOBAL_SECURITY_FEATURES))
    total = len(regional_services) * len(regions) + len(global_services)
    request_params = {
        "account_id": account_id,
        "regions": regions,
        "security_features": to_scan,
        "tags": tags
    }
    request_id = uuid.uuid4().hex

    DDB.add_request(api_table, request_id, request_params, total)

    # NOTE(review): 'ids' and 'tags' are validated above but not forwarded in
    # the SNS payload - confirm whether the workers expect to receive them.
    for security_feature in to_scan:
        topic_name = config.get_module_config_by_name(
            security_feature).sns_topic_name
        topic_arn = get_sns_topic_arn(config, topic_name)
        payload = {
            "account_id": account_id,
            "account_name": account_name,
            "regions": regions,
            "sns_arn": topic_arn,
            "request_id": request_id
        }
        Sns.publish(topic_arn, payload)

    response = {'request_id': request_id}

    return {
        "statusCode": 200,
        "body": json.dumps(response, indent=4)
        if isinstance(response, dict) else response
    }
Ejemplo n.º 21
0
    def create_tickets_public_ami(self):
        """ Class method to create jira tickets """
        table_name = self.config.publicAMIs.ddb_table_name

        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.publicAMIs.accounts.items(
        ):
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, PublicAMIIssue)
            for issue in issues:
                ami_id = issue.issue_id
                ami_region = issue.issue_details.region
                tags = issue.issue_details.tags
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    owner = issue.issue_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [
                            IssueStatus.Resolved, IssueStatus.Whitelisted
                    ]:
                        logging.debug(
                            f"Closing {issue.status.value} AMI '{ami_id}' public access issue"
                        )

                        comment = (
                            f"Closing {issue.status.value} AMI '{ami_id}' public access issue "
                            f"in '{account_name} / {account_id}' account, {ami_region} region"
                        )
                        if issue.status == IssueStatus.Whitelisted:
                            # Adding label with "whitelisted" to jira ticket.
                            jira.add_label(ticket_id=issue.jira_details.ticket,
                                           label=IssueStatus.Whitelisted.value)
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        logging.debug(
                            f"Updating AMI '{ami_id}' public access issue")

                        comment = "Issue details are changed, please check again.\n"

                        comment += JiraOperations.build_tags_table(tags)
                        jira.update_issue(ticket_id=issue.jira_details.ticket,
                                          comment=comment)
                        slack.report_issue(
                            msg=f"AMI '{ami_id}' pubic access issue is changed "
                            f"in '{account_name} / {account_id}' account, {ami_region} region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{ami_id}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting AMI '{ami_id}' public access issue")

                    owner = tags.get("owner", None)
                    bu = tags.get("bu", None)
                    product = tags.get("product", None)

                    if bu is None:
                        bu = self.config.get_bu_by_name(ami_id)

                    issue_summary = (
                        f"AMI '{ami_id}' with public access "
                        f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}"
                    )

                    issue_description = (f"AMI allows public access.\n\n"
                                         f"*Threat*: "
                                         f" .\n\n"
                                         f"*Risk*: High\n\n"
                                         f"*Account Name*: {account_name}\n"
                                         f"*Account ID*: {account_id}\n"
                                         f"*Region*: {ami_region}\n"
                                         f"*AMI Id*: {ami_id}\n"
                                         f"\n")

                    auto_remediation_date = (
                        self.config.now +
                        self.config.publicAMIs.issue_retention_date).date()
                    issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                    issue_description += JiraOperations.build_tags_table(tags)

                    issue_description += f"\n"
                    issue_description += (
                        f"*Recommendation*: "
                        f"Check if public access is truly needed and "
                        f"if not - update AMI permissions to restrict access to specific accounts."
                    )

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["public-ami"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                    except Exception:
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                    issue.jira_details.owner = owner
                    issue.jira_details.business_unit = bu
                    issue.jira_details.product = product

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner,
                        account_id=account_id,
                        bu=bu,
                        product=product,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
    # --- (page-scrape separator removed: "Ejemplo n.º 22 / 0") ---
    def create_tickets_cloud_trail_logging(self):
        """Create, update or close Jira tickets (with Slack notifications)
        for CloudTrail logging issues (disabled trails or delivery errors).

        Reads not-closed ``CloudTrailIssue`` records from DynamoDB for every
        account in ``self.config.cloudtrails.accounts`` and, per issue:

        * closes the Jira ticket when the issue is Resolved/Whitelisted,
        * updates the ticket when issue details changed after reporting,
        * creates a new ticket when the issue was not reported yet,

        then records the new status back into DynamoDB.
        """
        table_name = self.config.cloudtrails.ddb_table_name

        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.cloudtrails.accounts.items(
        ):
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, CloudTrailIssue)
            for issue in issues:
                # for CloudTrail issues the issue id is the affected region
                region = issue.issue_id
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    if issue.status in [
                            IssueStatus.Resolved, IssueStatus.Whitelisted
                    ]:
                        logging.debug(
                            f"Closing {issue.status.value} '{region}' CloudTrail logging issue"
                        )

                        comment = (
                            f"Closing {issue.status.value} issue with '{region}' CloudTrail logging in "
                            f"'{account_name} / {account_id}'")
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            account_id=account_id,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        logging.debug(f"Updating '{region}' issue")

                        comment = "Issue details are changed, please check again.\n"
                        comment += self.build_trail_status(
                            issue.issue_details.disabled,
                            issue.issue_details.delivery_errors)
                        comment += "\n\n"
                        comment += self.build_trails_table(
                            issue.issue_details.trails)
                        jira.update_issue(ticket_id=issue.jira_details.ticket,
                                          comment=comment)
                        slack.report_issue(
                            # FIX: typo "ssue" -> "issue" in notification text
                            msg=
                            f"CloudTrail logging '{region}' issue is changed in "
                            f"'{account_name} / {account_id}'"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            account_id=account_id,
                        )
                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{region}' issue")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting '{region}' CloudTrail logging issue")

                    if issue.issue_details.disabled:
                        issue_summary = f"Disabled CloudTrail in '{account_name} / {account_id} / {region}' "
                        issue_description = "No enabled CloudTrails for region available."
                        recommendation = "Create CloudTrail for region"
                    elif issue.issue_details.delivery_errors:
                        issue_summary = f"CloudTrail logging issues in '{account_name} / {account_id} / {region}' "
                        issue_description = "CloudTrail has issues with logging."
                        recommendation = "Check policies for CloudTrail logging"
                    else:
                        # the checker should only record disabled trails or
                        # delivery errors; anything else is a data corruption
                        raise Exception(
                            "not disabled and no errors, this should not have happened"
                        )

                    issue_description = (f"{issue_description}\n\n"
                                         f"*Risk*: High\n\n"
                                         f"*Account Name*: {account_name}\n"
                                         f"*Account ID*: {account_id}\n"
                                         f"*Region*: {region}\n")

                    issue_description += self.build_trail_status(
                        issue.issue_details.disabled,
                        issue.issue_details.delivery_errors)

                    issue_description += self.build_trails_table(
                        issue.issue_details.trails)

                    issue_description += f"\n\n*Recommendation*: {recommendation}. "

                    if self.config.whitelisting_procedure_url:
                        issue_description += (
                            f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                            f"and provide a strong business reasoning. ")

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["cloud-trail-disabled"],
                            account_id=account_id,
                        )
                    except Exception:
                        # Jira is best-effort: log and continue with the
                        # remaining issues rather than abort the whole run
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        account_id=account_id,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
    # --- (page-scrape separator removed: "Ejemplo n.º 23 / 0") ---
    def create_tickets_s3buckets(self):
        """Create, update or close Jira tickets (with Slack notifications)
        for S3 buckets whose ACL grants public access.

        Reads not-closed ``S3AclIssue`` records from DynamoDB for every
        account in ``self.config.s3acl.accounts`` and, per issue:

        * closes the Jira ticket when the issue is Resolved/Whitelisted
          (adding a "whitelisted" label in the latter case),
        * updates the ticket when issue details changed after reporting,
        * creates a new ticket when the issue was not reported yet,

        then records the new status back into DynamoDB.
        """
        table_name = self.config.s3acl.ddb_table_name

        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.s3acl.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, S3AclIssue)
            for issue in issues:
                bucket_name = issue.issue_id
                tags = issue.issue_details.tags
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    # FIX: read attribution back from jira_details, where it
                    # was stored at report time (see below) and where the
                    # sibling create_tickets_* methods read it from; the
                    # original read owner from issue_details instead
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [
                            IssueStatus.Resolved, IssueStatus.Whitelisted
                    ]:
                        logging.debug(
                            f"Closing {issue.status.value} S3 bucket '{bucket_name}' public ACL issue"
                        )

                        comment = (
                            f"Closing {issue.status.value} S3 bucket '{bucket_name}' public ACL issue "
                            f"in '{account_name} / {account_id}' account")
                        if issue.status == IssueStatus.Whitelisted:
                            # Adding label with "whitelisted" to jira ticket.
                            jira.add_label(ticket_id=issue.jira_details.ticket,
                                           label=IssueStatus.Whitelisted.value)
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        logging.debug(
                            f"Updating S3 bucket '{bucket_name}' public ACL issue"
                        )

                        comment = "Issue details are changed, please check again.\n"
                        comment += self.build_permissions_table(
                            issue.issue_details.public_acls)
                        comment += JiraOperations.build_tags_table(tags)
                        jira.update_issue(ticket_id=issue.jira_details.ticket,
                                          comment=comment)
                        slack.report_issue(
                            # FIX: typo "pubic" -> "public" in notification
                            msg=
                            f"S3 bucket '{bucket_name}' public ACL issue is changed "
                            f"in '{account_name} / {account_id}' account"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{bucket_name}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting S3 bucket '{bucket_name}' public ACL issue"
                    )

                    # attribution comes from resource tags when present
                    owner = tags.get("owner", None)
                    bu = tags.get("bu", None)
                    product = tags.get("product", None)

                    if bu is None:
                        # fall back to deriving the business unit from the
                        # bucket name
                        bu = self.config.get_bu_by_name(bucket_name)

                    issue_summary = (
                        f"S3 bucket '{bucket_name}' with public acl "
                        f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}"
                    )

                    issue_description = (
                        f"Bucket ACL allows unrestricted public access.\n\n"
                        f"*Threat*: "
                        f"This creates potential security vulnerabilities by allowing anyone to add, modify, or remove items in a bucket.\n\n"
                        f"*Risk*: High\n\n"
                        f"*Account Name*: {account_name}\n"
                        f"*Account ID*: {account_id}\n"
                        f"*S3 Bucket name*: {bucket_name}\n"
                        f"*Bucket Owner*: {owner}\n"
                        f"\n")

                    auto_remediation_date = (
                        self.config.now +
                        self.config.s3acl.issue_retention_date).date()
                    issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                    issue_description += JiraOperations.build_tags_table(tags)

                    issue_description += self.build_permissions_table(
                        issue.issue_details.public_acls)

                    issue_description += f"\n"
                    issue_description += (
                        f"*Recommendation*: "
                        f"Update bucket permissions to restrict access to the owner or specific users "
                        f"or grant CloudFront OAI applicable permissions on each object that CloudFront distribution serves. "
                    )

                    if self.config.whitelisting_procedure_url:
                        issue_description += (
                            f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                            f"and provide a strong business reasoning. ")

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["publics3"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                    except Exception:
                        # Jira is best-effort: log and continue with the
                        # remaining issues rather than abort the whole run
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                    # persist attribution so updates/closures can reuse it
                    issue.jira_details.owner = owner
                    issue.jira_details.business_unit = bu
                    issue.jira_details.product = product

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner,
                        account_id=account_id,
                        bu=bu,
                        product=product,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
    def create_tickets_ebsvolumes(self):
        """Create, update or close Jira tickets (with Slack notifications)
        for unencrypted EBS volumes.

        Reads not-closed ``EBSUnencryptedVolumeIssue`` records from DynamoDB
        for every account in ``self.config.ebsVolume.accounts`` and, per
        issue:

        * closes the Jira ticket when the issue is Resolved/Whitelisted,
        * notifies Slack when issue details changed after reporting (ticket
          update itself is still a TODO),
        * creates a new ticket when the issue was not reported yet,

        then records the new status back into DynamoDB. When the volume is
        attached to EC2 instances, instance metadata is fetched to enrich the
        ticket and to derive owner/bu/product attribution.
        """
        table_name = self.config.ebsVolume.ddb_table_name

        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.ebsVolume.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, EBSUnencryptedVolumeIssue)
            for issue in issues:
                volume_id = issue.issue_id
                region = issue.issue_details.region
                tags = issue.issue_details.tags
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [
                            IssueStatus.Resolved, IssueStatus.Whitelisted
                    ]:
                        logging.debug(
                            f"Closing {issue.status.value} EBS unencrypted volume '{volume_id}' issue"
                        )

                        comment = (
                            f"Closing {issue.status.value} EBS unencrypted volume '{volume_id}' issue "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                        )
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        # ticket update is not implemented yet - Slack only
                        logging.error(
                            f"TODO: update jira ticket with new data: {table_name}, {account_id}, {volume_id}"
                        )
                        slack.report_issue(
                            msg=
                            f"EBS unencrypted volume '{volume_id}' issue is changed "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{volume_id}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting EBS unencrypted volume '{volume_id}' issue"
                    )

                    # if owner/bu/product tags exist on volume - use it
                    volume_owner = tags.get("owner", None)
                    volume_bu = tags.get("bu", None)
                    volume_product = tags.get("product", None)

                    issue_description = (
                        f"EBS volume needs to be encrypted.\n\n"
                        f"*Risk*: High\n\n"
                        f"*Account Name*: {account_name}\n"
                        f"*Account ID*: {account_id}\n"
                        f"*Region*: {region}\n"
                        f"*Volume ID*: {volume_id}\n")

                    # attribution fallback: derived from attached EC2
                    # instances when volume tags are missing
                    ec2_details = ec2_owner = ec2_bu = ec2_product = None
                    if issue.issue_details.attachments:
                        account = Account(
                            id=account_id,
                            name=account_name,
                            region=region,
                            role_name=self.config.aws.role_name_reporting)

                        # session is None when the reporting role could not
                        # be assumed in the target account
                        if account.session is not None:
                            ec2_client = account.client("ec2")
                            ec2_instances = []
                            for instance_id, state in issue.issue_details.attachments.items(
                            ):
                                metadata = EC2Operations.get_instance_meta_data(
                                    ec2_client, instance_id)
                                if metadata is not None:
                                    ec2_instances.append({
                                        'ec2': metadata,
                                        'state': state
                                    })
                            ec2_details, ec2_owner, ec2_bu, ec2_product = self.build_instances_table(
                                ec2_instances)

                    # volume tags win over instance-derived attribution
                    owner = volume_owner if volume_owner is not None else ec2_owner
                    bu = volume_bu if volume_bu is not None else ec2_bu
                    product = volume_product if volume_product is not None else ec2_product

                    issue_description += JiraOperations.build_tags_table(tags)

                    issue_description += ec2_details if ec2_details else ''

                    issue_description += "*Recommendation*: Encrypt EBS volume. "

                    if self.config.whitelisting_procedure_url:
                        issue_description += (
                            f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                            f"and provide a strong business reasoning. ")

                    # FIX: removed doubled space before "in" in the summary
                    issue_summary = (
                        f"EBS unencrypted volume '{volume_id}' "
                        f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}"
                    )

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["unencrypted-ebs-volumes"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                    except Exception:
                        # Jira is best-effort: log and continue with the
                        # remaining issues rather than abort the whole run
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                    # persist attribution so updates/closures can reuse it
                    issue.jira_details.owner = owner
                    issue.jira_details.business_unit = bu
                    issue.jira_details.product = product

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner,
                        account_id=account_id,
                        bu=bu,
                        product=product,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
    def create_tickets_rds_unencrypted_instances(self):
        """Create, update or close Jira tickets (with Slack notifications)
        for unencrypted RDS instances.

        Reads not-closed ``RdsEncryptionIssue`` records from DynamoDB for
        every configured account and, per issue:

        * closes the Jira ticket when the issue is Resolved/Whitelisted
          (adding a "whitelisted" label in the latter case),
        * notifies Slack when issue details changed after reporting (ticket
          update itself is still a TODO),
        * creates a new ticket when the issue was not reported yet,

        then records the new status back into DynamoDB.
        """
        table_name = self.config.rdsEncrypt.ddb_table_name

        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        # NOTE(review): sibling methods iterate a module-specific account map
        # (e.g. self.config.s3acl.accounts); this one uses the global
        # self.config.aws.accounts -- confirm this is intentional.
        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(
                ddb_table, account_id, RdsEncryptionIssue)
            for issue in issues:
                instance_id = issue.issue_id
                instance_name = issue.issue_details.name
                region = issue.issue_details.region
                tags = issue.issue_details.tags
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [
                            IssueStatus.Resolved, IssueStatus.Whitelisted
                    ]:
                        logging.debug(
                            f"Closing {issue.status.value} RDS unencrypted instance '{instance_name}' issue"
                        )

                        comment = (
                            f"Closing {issue.status.value} RDS unencrypted instance '{instance_name}' issue "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                        )
                        if issue.status == IssueStatus.Whitelisted:
                            # Adding label with "whitelisted" to jira ticket.
                            jira.add_label(ticket_id=issue.jira_details.ticket,
                                           label=IssueStatus.Whitelisted.value)
                        jira.close_issue(ticket_id=issue.jira_details.ticket,
                                         comment=comment)
                        slack.report_issue(
                            msg=f"{comment}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        # ticket update is not implemented yet - Slack only
                        logging.error(
                            f"TODO: update jira ticket with new data: {table_name}, {account_id}, {instance_name}"
                        )
                        slack.report_issue(
                            msg=
                            f"RDS unencrypted instance '{instance_name}' issue is changed "
                            f"in '{account_name} / {account_id}' account, '{region}' region"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{instance_name}'")
                # issue has not been reported yet
                else:
                    logging.debug(
                        f"Reporting RDS unencrypted instance '{instance_name}' issue"
                    )

                    # attribution comes from resource tags when present
                    owner = tags.get("owner", None)
                    bu = tags.get("bu", None)
                    product = tags.get("product", None)

                    # FIX: added missing space before "in" in the summary
                    issue_summary = (
                        f"RDS unencrypted instance '{instance_name}' "
                        f"in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}"
                    )

                    issue_description = (
                        f"The RDS instance is unencrypted.\n\n"
                        f"*Threat*: "
                        f"Based on data protection policies, data that is classified as sensitive information or "
                        f"intellectual property of the organization needs to be encrypted. Additionally, as part of the "
                        f"initiative of Encryption Everywhere, it is necessary to encrypt the data in order to ensure the "
                        f"confidentiality and integrity of the data.\n\n"
                        f"*Risk*: High\n\n"
                        f"*Account Name*: {account_name}\n"
                        f"*Account ID*: {account_id}\n"
                        f"*Region*: {region}\n"
                        f"*RDS Instance ID*: {instance_id}\n")

                    issue_description += JiraOperations.build_tags_table(tags)

                    issue_description += "\n"
                    issue_description += (f"*Recommendation*: "
                                          f"Encrypt RDS instance.")

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary,
                            issue_description=issue_description,
                            priority="Major",
                            labels=["rds-unencrypted-instances"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu,
                            product=product,
                        )
                    except Exception:
                        # Jira is best-effort: log and continue with the
                        # remaining issues rather than abort the whole run
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                    # persist attribution so updates/closures can reuse it
                    issue.jira_details.owner = owner
                    issue.jira_details.business_unit = bu
                    issue.jira_details.product = product

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                        f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                        owner=owner,
                        account_id=account_id,
                        bu=bu,
                        product=product,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
    # --- (page-scrape separator removed: "Ejemplo n.º 26 / 0") ---
    def create_tickets_securitygroups(self):
        """
        Create, update and close Jira tickets (with Slack notifications) for
        insecure security group issues tracked in DynamoDB.

        For each account in ``self.config.sg.accounts``:
        * already-reported issues that are now Resolved/Whitelisted are closed;
        * already-reported issues whose details changed since reporting are updated;
        * not-yet-reported issues get a new Jira ticket whose priority and text
          depend on the exposure of the resources attached to the group
          (public / blind-public / unused / intranet-only).
        """
        table_name = self.config.sg.ddb_table_name

        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(table_name)
        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.sg.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_not_closed_issues(ddb_table, account_id, SecurityGroupIssue)
            for issue in issues:
                group_id = issue.issue_id
                group_name = issue.issue_details.name
                group_region = issue.issue_details.region
                group_vpc_id = issue.issue_details.vpc_id
                tags = issue.issue_details.tags
                # issue has been already reported
                if issue.timestamps.reported is not None:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    if issue.status in [IssueStatus.Resolved, IssueStatus.Whitelisted]:
                        logging.debug(f"Closing {issue.status.value} security group '{group_name} / {group_id}' issue")

                        comment = (f"Closing {issue.status.value} security group '{group_name} / {group_id}' issue "
                                   f"in '{account_name} / {account_id}' account, '{group_region}' region")
                        if issue.status == IssueStatus.Whitelisted:
                            # Adding label with "whitelisted" to jira ticket.
                            jira.add_label(
                                ticket_id=issue.jira_details.ticket,
                                label=IssueStatus.Whitelisted.value
                            )
                        jira.close_issue(
                            ticket_id=issue.jira_details.ticket,
                            comment=comment
                        )
                        slack.report_issue(
                            msg=f"{comment}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu, product=product,
                        )
                        IssueOperations.set_status_closed(ddb_table, issue)
                    # issue.status != IssueStatus.Closed (should be IssueStatus.Open)
                    elif issue.timestamps.updated > issue.timestamps.reported:
                        logging.debug(f"Updating security group '{group_name} / {group_id}' issue")

                        comment = "Issue details are changed, please check again.\n"
                        comment += self.build_open_ports_table_jira(issue.issue_details.perms)
                        comment += JiraOperations.build_tags_table(tags)
                        jira.update_issue(
                            ticket_id=issue.jira_details.ticket,
                            comment=comment
                        )
                        slack.report_issue(
                            msg=f"Security group '{group_name} / {group_id}' issue is changed "
                                f"in '{account_name} / {account_id}' account, '{group_region}' region"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}"
                                f"\n"
                                f"{self.build_open_ports_table_slack(issue.issue_details.perms)}",
                            owner=owner,
                            account_id=account_id,
                            bu=bu, product=product,
                        )
                        IssueOperations.set_status_updated(ddb_table, issue)
                    else:
                        logging.debug(f"No changes for '{group_name} / {group_id}'")
                # issue has not been reported yet
                else:
                    logging.debug(f"Reporting security group '{group_name} / {group_id}' issue")

                    status = RestrictionStatus(issue.issue_details.status)
                    # if owner/bu/product tags exist on security group - use it
                    group_owner = tags.get("owner", None)
                    group_bu = tags.get("bu", None)
                    group_product = tags.get("product", None)

                    open_port_details = self.build_open_ports_table_jira(issue.issue_details.perms)

                    account_details = (f"*Risk*: High\n\n"
                                       f"*Account Name*: {account_name}\n"
                                       f"*Account ID*: {account_id}\n"
                                       f"*SG Name*: {group_name}\n"
                                       f"*SG ID*: {group_id}\n"
                                       f"*Region*: {group_region}\n")

                    account_details += f"*VPC*: {group_vpc_id}\n\n" if group_vpc_id else "\n"

                    account = Account(id=account_id,
                                      name=account_name,
                                      region=group_region,
                                      role_name=self.config.aws.role_name_reporting)
                    ec2_client = account.client("ec2") if account.session is not None else None

                    # FIX: instance_profile_details must be pre-initialized together with
                    # the other detail variables; it is only assigned inside the
                    # `ec2_client is not None` branch below but is always referenced
                    # when the issue description is composed, which previously raised
                    # NameError whenever the assumed-role session was unavailable.
                    sg_instance_details = instance_profile_details = ec2_owner = ec2_bu = ec2_product = None
                    sg_in_use = sg_in_use_ec2 = sg_in_use_elb = sg_in_use_rds = None
                    sg_public = sg_blind_public = False

                    rds_client = account.client("rds") if account.session is not None else None
                    elb_client = account.client("elb") if account.session is not None else None
                    elbv2_client = account.client("elbv2") if account.session is not None else None

                    iam_client = account.client("iam") if account.session is not None else None

                    rds_instance_details = elb_instance_details = None

                    # collect details of EC2/ELB/RDS resources attached to the group;
                    # each collection is best-effort and must not block ticket creation
                    if ec2_client is not None:
                        ec2_instances = EC2Operations.get_instance_details_of_sg_associated(ec2_client, group_id)
                        sg_instance_details, instance_profile_details,\
                            sg_in_use_ec2, sg_public, sg_blind_public, \
                            ec2_owner, ec2_bu, ec2_product = self.build_instances_table(iam_client, ec2_instances)

                    if elb_client is not None and elbv2_client is not None:
                        try:
                            elb_instances = EC2Operations.get_elb_details_of_sg_associated(elb_client, elbv2_client, group_id)
                            elb_instance_details, sg_in_use_elb = self.build_elb_instances_table(elb_instances)
                        except Exception:
                            logging.exception(f"Failed to build ELB details for '{group_name} / {group_id}' in {account}")

                    if rds_client is not None:
                        try:
                            rds_instances = RDSOperations.get_rds_instance_details_of_sg_associated(rds_client, group_id)
                            rds_instance_details, sg_in_use_rds = self.build_rds_instances_table(rds_instances)
                        except Exception:
                            logging.exception(f"Failed to build RDS details for '{group_name} / {group_id}' in {account}")

                    sg_in_use = sg_in_use_ec2 or sg_in_use_elb or sg_in_use_rds

                    # tags on the group itself take precedence over tags discovered on instances
                    owner = group_owner if group_owner is not None else ec2_owner
                    bu = group_bu if group_bu is not None else ec2_bu
                    product = group_product if group_product is not None else ec2_product

                    if bu is None:
                        bu = self.config.get_bu_by_name(group_name)

                    source_description = f"has {status.value} status"
                    if status == RestrictionStatus.OpenCompletely:
                        source_description = "allows access from any IP address (0.0.0.0/0, ::/0)"
                    elif status == RestrictionStatus.OpenPartly:
                        source_description = "allows access from some definite public ip addresses or networks"

                    # priority/summary/threat text depend on how exposed the group is
                    if sg_public:
                        priority = "Critical"
                        summary_status = "Internet"
                        issue_description = (f"Security group has EC2 instances in public subnets "
                                             f"with public IP address attached and "
                                             f"{source_description} "
                                             f"for following ports:\n")
                        threat = (
                            f"*Threat*: "
                            f"Instances associated with this security group are accessible via public route over Internet and "
                            f"have ingress rules which allows access to critical services which should be accessible "
                            f"only from VPN or Direct Connect. Accessing these instances via Internet can lead to leakage "
                            f"to third parties of login credentials for such services as databases/remote access. "
                            f"Open and Unrestricted access from Internet increases opportunities for "
                            f"malicious activity from public internet which can potentially result into "
                            f"hacking, denial-of-service attacks, loss of data, etc. This also provides "
                            f"an ingress point to the attackers to gain backdoor access within the other "
                            f"critical services.\n"
                        )
                    elif sg_blind_public:
                        priority = "Critical"
                        summary_status = "Internet"
                        issue_description = (f"Security group has EC2 instances in private subnets "
                                             f"with public IP address attached and "
                                             f"{source_description} "
                                             f"for following ports:\n")
                        threat = (f"*Threat*: "
                                  f"Instances listed below can be probed by external attack vectors and "
                                  f"make them vulnerable to blind injection based attacks, as although "
                                  f"the EC2 instances is in a private subnet, if security group and NACL "
                                  f"are allowing access from the internet incoming, traffic will reach "
                                  f"instances when someone is probing the public IP of the instances. "
                                  f"However, there will be no return traffic due to the lack of an IGW.\n")
                    elif not sg_in_use:
                        priority = "Minor"
                        summary_status = "Unused"
                        issue_description = (f"Security group has no EC2 instances attached and "
                                             f"{source_description} "
                                             f"for following ports:\n")
                        threat = (f"*Threat*: "
                                  f"An unused SG can be leveraged to gain control/access within the network "
                                  f"if attached to any exposed instance. This unrestricted access increases "
                                  f"opportunities for malicious activity (hacking, denial-of-service attacks, "
                                  f"loss of data).\n")
                    else:
                        priority = "Major"
                        summary_status = "Intranet"
                        issue_description = (
                            f"Security group has EC2 instances in private subnets and "
                            f"{source_description} "
                            f"for following ports:\n")
                        threat = (f"*Threat*: "
                                  f"Open access within the network not only provides unrestricted access to "
                                  f"other servers but increases opportunities for malicious activity (hacking, "
                                  f"denial-of-service attacks, loss of data) if attacker gains access to the "
                                  f"services within the network, thus providing lateral movement.\n")

                    tags_table = JiraOperations.build_tags_table(tags)

                    issue_description = (
                        f"{issue_description}"
                        f"{open_port_details}"
                        f"{threat}"
                        f"{account_details}")

                    # completely open groups are auto-remediated after the retention period
                    if status == RestrictionStatus.OpenCompletely:
                        auto_remediation_date = (self.config.now + self.config.sg.issue_retention_date).date()
                        issue_description += f"\n{{color:red}}*Auto-Remediation Date*: {auto_remediation_date}{{color}}\n\n"

                    issue_description += f"{tags_table}"

                    issue_description += f"{sg_instance_details if sg_instance_details else ''}"

                    issue_description += f"{rds_instance_details if rds_instance_details else ''}"

                    issue_description += f"{elb_instance_details if elb_instance_details else ''}"

                    issue_description += f"{instance_profile_details if instance_profile_details else ''}"

                    issue_description += (
                        f"*Recommendation*: "
                        f"Allow access only for a minimum set of required ip addresses/ranges from [RFC1918|https://tools.ietf.org/html/rfc1918]. "
                    )

                    if self.config.whitelisting_procedure_url:
                        issue_description += (f"For any other exceptions, please follow the [whitelisting procedure|{self.config.whitelisting_procedure_url}] "
                                              f"and provide a strong business reasoning. ")

                    issue_description += f"Be sure to delete overly permissive rules after creating rules that are more restrictive.\n"

                    issue_summary = (f"{summary_status} open security group '{group_name}'"
                                     f" in '{account_name} / {account_id}' account{' [' + bu + ']' if bu else ''}")

                    try:
                        response = jira.add_issue(
                            issue_summary=issue_summary, issue_description=issue_description,
                            priority=priority, labels=["insecure-services"],
                            owner=owner,
                            account_id=account_id,
                            bu=bu, product=product,
                        )
                    except Exception:
                        logging.exception("Failed to create jira ticket")
                        continue

                    if response is not None:
                        issue.jira_details.ticket = response.ticket_id
                        issue.jira_details.ticket_assignee_id = response.ticket_assignee_id

                    issue.jira_details.public = sg_public
                    issue.jira_details.blind_public = sg_blind_public
                    issue.jira_details.in_use = sg_in_use
                    issue.jira_details.owner = owner
                    issue.jira_details.business_unit = bu
                    issue.jira_details.product = product

                    slack.report_issue(
                        msg=f"Discovered {issue_summary}"
                            f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}"
                            f"\n"
                            f"{self.build_open_ports_table_slack(issue.issue_details.perms)}",
                        owner=owner,
                        account_id=account_id,
                        bu=bu, product=product,
                    )

                    IssueOperations.set_status_reported(ddb_table, issue)
Ejemplo n.º 27
0
    def clean_sqs_policy_permissions(self):
        """
        Remediate SQS queue policies which are violating aws best practices.

        Walks open SQSPolicyIssue records for each configured account and,
        once an issue is older than the configured retention period, backs up
        the queue policy to S3 and restricts it, then updates the Jira ticket
        and sends a Slack notification.
        """
        # FIX: use self.config consistently; the rest of this method (and
        # sibling methods) reads configuration through self.config, and bare
        # `config` is not defined in this scope.
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            self.config.sqspolicy.ddb_table_name)
        backup_bucket = self.config.aws.s3_backup_bucket

        retention_period = self.config.sqspolicy.remediation_retention_period

        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.aws.accounts.items():
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_open_issues(
                ddb_table, account_id, SQSPolicyIssue)
            for issue in issues:
                queue_url = issue.issue_id
                queue_name = issue.issue_details.name
                queue_region = issue.issue_details.region

                in_whitelist = self.config.sqspolicy.in_whitelist(
                    account_id, queue_url)

                # skip issues which must not be auto-remediated
                if in_whitelist:
                    logging.debug(f"Skipping {queue_name} (in whitelist)")
                    continue

                if issue.timestamps.reported is None:
                    logging.debug(
                        f"Skipping '{queue_name}' (was not reported)")
                    continue

                if issue.timestamps.remediated is not None:
                    logging.debug(
                        f"Skipping {queue_name} (has been already remediated)")
                    continue

                updated_date = issue.timestamp_as_datetime
                no_of_days_issue_created = (self.config.now -
                                            updated_date).days

                if no_of_days_issue_created >= retention_period:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    try:
                        account = Account(
                            id=account_id,
                            name=account_name,
                            region=issue.issue_details.region,
                            role_name=self.config.aws.role_name_reporting)
                        if account.session is None:
                            continue

                        # re-check the queue right before remediating, in case
                        # the user already removed or fixed it
                        checker = SQSPolicyChecker(account=account)
                        checker.check(queues=[queue_url])
                        queue = checker.get_queue(queue_name)
                        if queue is None:
                            logging.debug(
                                f"Queue {queue_name} was removed by user")
                        elif not queue.public:
                            logging.debug(
                                f"Queue {queue.name} policy issue was remediated by user"
                            )
                        else:
                            logging.debug(f"Remediating '{queue.name}' policy")

                            # back up the current policy to S3 before restricting it
                            backup_path = queue.backup_policy_s3(
                                main_account.client("s3"), backup_bucket)
                            remediation_succeed = True
                            if queue.restrict_policy():
                                comment = (
                                    f"Policy backup was saved to "
                                    f"[{backup_path}|https://s3.console.aws.amazon.com/s3/object/{backup_bucket}/{backup_path}]. "
                                    f"Queue '{queue.name}' policy issue "
                                    f"in '{account_name} / {account_id}' account, '{queue_region}' region "
                                    f"was remediated by hammer")
                            else:
                                remediation_succeed = False
                                comment = (
                                    f"Failed to remediate queue '{queue.name}' policy issue "
                                    f"in '{account_name} / {account_id}' account, '{queue_region}' region "
                                    f"due to some limitations. Please, check manually"
                                )

                            jira.remediate_issue(
                                ticket_id=issue.jira_details.ticket,
                                comment=comment,
                                reassign=remediation_succeed,
                            )
                            slack.report_issue(
                                msg=f"{comment}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                                owner=owner,
                                account_id=account_id,
                                bu=bu,
                                product=product,
                            )
                            IssueOperations.set_status_remediated(
                                ddb_table, issue)
                    except Exception:
                        logging.exception(
                            f"Error occurred while updating queue '{queue_url}' policy "
                            f"in '{account_name} / {account_id}', '{queue_region}' region"
                        )
                else:
                    logging.debug(
                        f"Skipping '{queue_name}' "
                        f"({retention_period - no_of_days_issue_created} days before remediation)"
                    )
Ejemplo n.º 28
0
def lambda_handler(event, context):
    """ Lambda handler to evaluate s3 buckets acl """
    set_logging(level=logging.INFO)

    # Pull account details out of the SNS message that triggered this lambda.
    try:
        sns_message = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = sns_message['account_id']
        account_name = sns_message['account_name']
        # if request_id is present in payload then this lambda was called from the API
        request_id = sns_message.get('request_id', None)
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        main_account = Account(region=config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(config.s3acl.ddb_table_name)

        account = Account(id=account_id,
                          name=account_name,
                          role_name=config.aws.role_name_identification)
        if account.session is None:
            return

        logging.debug(f"Checking for public S3 ACLs in {account}")

        # index the still-open issues by bucket name for fast lookup below
        open_issues = {
            issue.issue_id: issue
            for issue in IssueOperations.get_account_open_issues(ddb_table, account_id, S3AclIssue)
        }
        logging.debug(f"S3 in DDB:\n{open_issues.keys()}")

        checker = S3BucketsAclChecker(account=account)
        if not checker.check():
            return

        for bucket in checker.buckets:
            logging.debug(f"Checking {bucket.name}")
            if not bucket.public:
                continue
            issue = S3AclIssue(account_id, bucket.name)
            issue.issue_details.owner = bucket.owner
            issue.issue_details.public_acls = bucket.get_public_acls()
            issue.issue_details.tags = bucket.tags
            issue.status = (IssueStatus.Whitelisted
                            if config.s3acl.in_whitelist(account_id, bucket.name)
                            else IssueStatus.Open)
            logging.debug(f"Setting {bucket.name} status {issue.status}")
            IssueOperations.update(ddb_table, issue)
            # this bucket has been accounted for; whatever remains afterwards is stale
            open_issues.pop(bucket.name, None)

        logging.debug(f"S3 in DDB:\n{open_issues.keys()}")
        # any leftover open issues belong to removed/remediated buckets
        for stale_issue in open_issues.values():
            IssueOperations.set_status_resolved(ddb_table, stale_issue)
        # track scan progress when this run was started through the API
        if request_id:
            api_table = main_account.resource("dynamodb").Table(config.api.ddb_table_name)
            DDB.track_progress(api_table, request_id)
    except Exception:
        logging.exception(
            f"Failed to check s3 acls for '{account_id} ({account_name})'")
        return

    logging.debug(f"Checked s3 acls for '{account_id} ({account_name})'")
    def clean_s3bucket_policy_permissions(self, batch=False):
        """
        Remediate S3 bucket policies which are violating aws best practices.

        Walks open S3PolicyIssue records for each remediation account and,
        once an issue is older than the configured retention period, backs up
        the bucket policy to S3 and restricts it, then updates the Jira ticket
        and sends a Slack notification.

        :param batch: when True, remediate without interactive confirmation
        """
        # FIX: use self.config consistently; the rest of this method (and
        # sibling methods) reads configuration through self.config, and bare
        # `config` is not defined in this scope.
        main_account = Account(region=self.config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            self.config.s3policy.ddb_table_name)
        backup_bucket = self.config.aws.s3_backup_bucket

        retention_period = self.config.s3policy.remediation_retention_period

        jira = JiraReporting(self.config)
        slack = SlackNotification(self.config)

        for account_id, account_name in self.config.s3policy.remediation_accounts.items(
        ):
            logging.debug(f"Checking '{account_name} / {account_id}'")
            issues = IssueOperations.get_account_open_issues(
                ddb_table, account_id, S3PolicyIssue)
            for issue in issues:
                bucket_name = issue.issue_id

                in_whitelist = self.config.s3policy.in_whitelist(
                    account_id, bucket_name)
                #in_fixlist = self.config.s3policy.in_fixnow(account_id, bucket_name)

                if in_whitelist:
                    logging.debug(f"Skipping {bucket_name} (in whitelist)")

                    # Adding label with "whitelisted" to jira ticket.
                    jira.add_label(ticket_id=issue.jira_details.ticket,
                                   label=IssueStatus.Whitelisted.value)
                    continue
                # if not in_fixlist:
                #     logging.debug(f"Skipping {bucket_name} (not in fixlist)")
                #     continue

                if issue.timestamps.reported is None:
                    logging.debug(
                        f"Skipping '{bucket_name}' (was not reported)")
                    continue

                if issue.timestamps.remediated is not None:
                    logging.debug(
                        f"Skipping {bucket_name} (has been already remediated)"
                    )
                    continue

                updated_date = issue.timestamp_as_datetime
                no_of_days_issue_created = (self.config.now -
                                            updated_date).days

                if no_of_days_issue_created >= retention_period:
                    owner = issue.jira_details.owner
                    bu = issue.jira_details.business_unit
                    product = issue.jira_details.product

                    try:
                        account = Account(
                            id=account_id,
                            name=account_name,
                            role_name=self.config.aws.role_name_reporting)
                        if account.session is None:
                            continue

                        # re-check the bucket right before remediating, in case
                        # the user already removed or fixed it
                        checker = S3BucketsPolicyChecker(account=account)
                        checker.check(buckets=[bucket_name])
                        s3bucket = checker.get_bucket(bucket_name)
                        if s3bucket is None:
                            # FIX: s3bucket is None in this branch, so reading
                            # s3bucket.name raised AttributeError; log the
                            # bucket name we looked up instead.
                            logging.debug(
                                f"Bucket {bucket_name} was removed by user")
                        elif not s3bucket.public_by_policy:
                            logging.debug(
                                f"Bucket {s3bucket.name} policy issue was remediated by user"
                            )
                        else:
                            # interactive runs require explicit confirmation
                            if not batch and \
                               not confirm(f"Do you want to remediate '{bucket_name}' S3 bucket policy", False):
                                continue

                            logging.debug(
                                f"Remediating '{s3bucket.name}' policy")

                            # back up the current policy to S3 before restricting it
                            backup_path = s3bucket.backup_policy_s3(
                                main_account.client("s3"), backup_bucket)
                            remediation_succeed = True
                            if s3bucket.restrict_policy():
                                comment = (
                                    f"Policy backup was saved to "
                                    f"[{backup_path}|https://s3.console.aws.amazon.com/s3/object/{backup_bucket}/{backup_path}]. "
                                    f"Bucket '{s3bucket.name}' policy issue "
                                    f"in '{account_name} / {account_id}' account "
                                    f"was remediated by hammer")
                            else:
                                remediation_succeed = False
                                comment = (
                                    f"Failed to remediate bucket '{s3bucket.name}' policy issue "
                                    f"in '{account_name} / {account_id}' account "
                                    f"due to some limitations. Please, check manually"
                                )

                            jira.remediate_issue(
                                ticket_id=issue.jira_details.ticket,
                                comment=comment,
                                reassign=remediation_succeed,
                            )
                            slack.report_issue(
                                msg=f"{comment}"
                                f"{' (' + jira.ticket_url(issue.jira_details.ticket) + ')' if issue.jira_details.ticket else ''}",
                                owner=owner,
                                account_id=account_id,
                                bu=bu,
                                product=product,
                            )
                            IssueOperations.set_status_remediated(
                                ddb_table, issue)
                    except Exception:
                        logging.exception(
                            f"Error occurred while updating bucket '{bucket_name}' policy "
                            f"in '{account_name} / {account_id}'")
                else:
                    logging.debug(
                        f"Skipping '{bucket_name}' "
                        f"({retention_period - no_of_days_issue_created} days before remediation)"
                    )
Ejemplo n.º 30
0
def _record_sg_issue(ddb_table, config, account_id, sg):
    """ Create or refresh the DDB issue record for one non-restricted security group """
    # TODO: move instances detection for security group from reporting to identification
    #ec2_instances = EC2Operations.get_instance_details_of_sg_associated(account.client("ec2"), sg.id)
    #logging.debug(f"associated ec2 instances: {ec2_instances}")
    issue = SecurityGroupIssue(account_id, sg.id)
    issue.issue_details.name = sg.name
    issue.issue_details.region = sg.account.region
    issue.issue_details.tags = sg.tags
    issue.issue_details.status = sg.status.value
    # record every offending protocol/port/CIDR combination on the issue
    for perm in sg.permissions:
        for ip_range in perm.ip_ranges:
            if not ip_range.restricted:
                issue.add_perm(perm.protocol, perm.from_port,
                               perm.to_port, ip_range.cidr,
                               ip_range.status)
    # a group can be whitelisted either by its name or by its id
    if config.sg.in_whitelist(
            account_id, sg.name) or config.sg.in_whitelist(
                account_id, sg.id):
        issue.status = IssueStatus.Whitelisted
    else:
        issue.status = IssueStatus.Open
    logging.debug(f"Setting {sg.id} status {issue.status}")
    IssueOperations.update(ddb_table, issue)


def _check_insecure_services(account_id, account_name, region):
    """ Scan one account/region for insecure security groups and sync issue
    records in DynamoDB.

    Returns False when a role cannot be assumed in the target account (the
    caller must then abort without chaining further regions), True otherwise.
    May raise: any boto/DDB error is propagated to the caller for logging.
    """
    config = Config()

    main_account = Account(region=config.aws.region)
    ddb_table = main_account.resource("dynamodb").Table(
        config.sg.ddb_table_name)

    account = Account(id=account_id,
                      name=account_name,
                      region=region,
                      role_name=config.aws.role_name_identification)
    if account.session is None:
        # role assumption failed -- nothing can be checked in this account
        return False

    logging.debug(f"Checking for insecure services in {account}")

    # existing open issues for account to check if resolved
    open_issues = IssueOperations.get_account_open_issues(
        ddb_table, account_id, SecurityGroupIssue)
    # make dictionary for fast search by id
    # and filter by current region
    open_issues = {
        issue.issue_id: issue
        for issue in open_issues if issue.issue_details.region == region
    }
    logging.debug(f"Security groups in DDB:\n{open_issues.keys()}")

    checker = SecurityGroupsChecker(
        account=account, restricted_ports=config.sg.restricted_ports)
    if checker.check():
        for sg in checker.groups:
            logging.debug(f"Checking {sg.name} ({sg.id})")
            if not sg.restricted:
                _record_sg_issue(ddb_table, config, account_id, sg)
                # remove issue id from issues_list_from_db (if exists)
                # as we already checked it
                open_issues.pop(sg.id, None)

        logging.debug(f"Security groups in DDB:\n{open_issues.keys()}")
        # all other unresolved issues in DDB are for removed/remediated security groups
        for issue in open_issues.values():
            IssueOperations.set_status_resolved(ddb_table, issue)
    return True


def lambda_handler(event, context):
    """ Lambda handler to evaluate insecure services.

    Expects an SNS-wrapped JSON payload with 'account_id', 'account_name',
    a list 'regions' still to process, and 'sns_arn' for self-chaining.
    Processes one region per invocation and re-publishes the shrunken
    payload so remaining regions are handled by subsequent invocations.
    """
    set_logging(level=logging.INFO)

    try:
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        # get the last region from the list to process
        region = payload['regions'].pop()
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        if not _check_insecure_services(account_id, account_name, region):
            # could not assume a role in the target account -- stop without
            # chaining the remaining regions (matches original behavior)
            return
    except Exception:
        # a failed region must not break the chain -- log and keep chaining
        logging.exception(
            f"Failed to check insecure services in '{region}' for '{account_id} ({account_name})'"
        )

    # push SNS messages until the list with regions to check is empty
    if len(payload['regions']) > 0:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            logging.exception("Failed to chain insecure services checking")

    logging.debug(
        f"Checked insecure services in '{region}' for '{account_id} ({account_name})'"
    )