Example #1
0
def lambda_handler(event, context):
    """Fan out IAM inactive-access-key checks across configured accounts.

    Reads the target SNS topic ARN from the environment, exits early when
    the feature is disabled in config, and publishes one SNS message per
    configured account so downstream lambdas perform the actual check.
    """
    set_logging(level=logging.INFO)
    logging.debug("Initiating IAM user inactive keys checking")

    try:
        topic_arn = os.environ["SNS_IAM_USER_INACTIVE_KEYS_ARN"]
        module = Config().iamUserInactiveKeys

        # Feature toggle — do nothing when disabled in config.
        if not module.enabled:
            logging.debug("IAM user inactive keys checking disabled")
            return

        logging.debug(
            "Iterating over each account to initiate IAM user inactive keys check"
        )
        for acc_id, acc_name in module.accounts.items():
            logging.debug(
                f"Initiating IAM user inactive keys checking for '{acc_name}'"
            )
            Sns.publish(topic_arn, {
                "account_id": acc_id,
                "account_name": acc_name,
            })
    except Exception:
        logging.exception(
            "Error occurred while initiation of IAM user inactive keys check")
        return

    logging.debug("IAM user inactive keys checking initiation done")
Example #2
0
def lambda_handler(event, context):
    """Fan out SQS public-policy checks: one SNS message per configured account.

    Each message carries the account, the configured regions, and the topic
    ARN so the downstream checker can self-chain through the regions.
    """
    set_logging(level=logging.INFO)
    logging.debug("Initiating SQS policies checking")

    try:
        topic_arn = os.environ["SNS_ARN"]
        config = Config()
        module = config.sqspolicy

        # Feature toggle — do nothing when disabled in config.
        if not module.enabled:
            logging.debug("SQS policies checking disabled")
            return

        logging.debug(
            "Iterating over each account to initiate SQS policies check")
        for acc_id, acc_name in module.accounts.items():
            logging.debug(
                f"Initiating SQS policies checking for '{acc_name}'")
            Sns.publish(topic_arn, {
                "account_id": acc_id,
                "account_name": acc_name,
                "regions": config.aws.regions,
                "sns_arn": topic_arn,
            })

    except Exception:
        logging.exception(
            "Error occurred while initiation of SQS policy checking")
        return

    logging.debug("SQS policies checking initiation done")
def lambda_handler(event, context):
    """Fan out S3 bucket ACL public-access checks across configured accounts.

    Publishes one SNS message per configured account; no region list is
    included here (S3 bucket ACLs are enumerated account-wide downstream).
    """
    set_logging(level=logging.INFO)
    logging.debug("Initiating S3 acls checking")

    try:
        topic_arn = os.environ["SNS_S3_ACL_ARN"]
        module = Config().s3acl

        # Feature toggle — do nothing when disabled in config.
        if not module.enabled:
            logging.debug("S3 acls checking disabled")
            return

        logging.debug("Iterating over each account to initiate s3 acls check")
        for acc_id, acc_name in module.accounts.items():
            logging.debug(f"Initiating s3 acls checking for '{acc_name}'")
            Sns.publish(topic_arn, {"account_id": acc_id,
                                    "account_name": acc_name})
    except Exception:
        logging.exception("Error occurred while initiation of S3 acl checking")
        return

    logging.debug("S3 acls checking initiation done")
def lambda_handler(event, context):
    """Fan out public RDS snapshot checks: one SNS message per configured account.

    Each message carries the account, the configured regions, and the topic
    ARN so the downstream checker can self-chain through the regions.
    """
    set_logging(level=logging.INFO)
    logging.debug("Initiating public RDS snapshots checking")

    try:
        topic_arn = os.environ["SNS_ARN"]
        config = Config()
        module = config.rdsSnapshot

        # Feature toggle — do nothing when disabled in config.
        if not module.enabled:
            logging.debug("Public RDS snapshots checking disabled")
            return

        logging.debug("Iterating each account to initiate RDS snapshots checking")
        for acc_id, acc_name in module.accounts.items():
            message = {
                "account_id": acc_id,
                "account_name": acc_name,
                "regions": config.aws.regions,
                "sns_arn": topic_arn,
            }
            logging.debug(f"Initiating public RDS snapshots checking for '{acc_name}'")
            Sns.publish(topic_arn, message)
    except Exception:
        logging.exception("Error occurred while initiation of public RDS snapshots checking")
        return

    logging.debug("Public RDS snapshot checking initiation done")
Example #5
0
def lambda_handler(event, context):
    """Lambda handler to initiate CloudTrail status checking.

    (Docstring corrected: it previously described security-group checking,
    a copy-paste error — every log message and config section in the body
    refers to CloudTrail.)

    Publishes one SNS message per configured account, carrying the account,
    the configured regions, and the topic ARN, so the downstream checker
    can self-chain through the regions.
    """
    set_logging(level=logging.INFO)
    logging.debug("Initiating CloudTrail checking")

    try:
        # Topic that triggers the per-account CloudTrail identification lambda.
        sns_arn = os.environ["SNS_ARN"]
        config = Config()

        # Feature toggle — do nothing when disabled in config.
        if not config.cloudtrails.enabled:
            logging.debug("CloudTrail checking disabled")
            return

        logging.debug(
            "Iterating over each account to initiate CloudTrail check")
        for account_id, account_name in config.cloudtrails.accounts.items():
            payload = {
                "account_id": account_id,
                "account_name": account_name,
                "regions": config.aws.regions,
                "sns_arn": sns_arn
            }
            logging.debug(
                f"Initiating CloudTrail checking for '{account_name}'")
            Sns.publish(sns_arn, payload)
    except Exception:
        logging.exception(
            "Error occurred while initiation of CloudTrail checking")
        return

    logging.debug("CloudTrail checking initiation done")
Example #6
0
def lambda_handler(event, context):
    """Fan out unencrypted EBS volume checks across configured accounts.

    Each message carries the account, the configured regions, and the topic
    ARN so the downstream checker can self-chain through the regions.
    """
    set_logging(level=logging.INFO)
    logging.debug("Initiating unencrypted EBS volumes checking")

    try:
        topic_arn = os.environ["SNS_EBS_VOLUMES_ARN"]
        config = Config()
        module = config.ebsVolume

        # Feature toggle — do nothing when disabled in config.
        if not module.enabled:
            logging.debug("Unencrypted EBS volumes checking disabled")
            return

        logging.debug("Iterating over each account to initiate unencrypted EBS volumes checking")
        for acc_id, acc_name in module.accounts.items():
            logging.debug(f"Initiating unencrypted EBS volume checking for '{acc_name}'")
            Sns.publish(topic_arn, {
                "account_id": acc_id,
                "account_name": acc_name,
                "regions": config.aws.regions,
                "sns_arn": topic_arn,
            })
    except Exception:
        logging.exception("Error occurred while initiation of unencrypted EBS volumes checking")
        return

    logging.debug("Unencrypted EBS volume checking initiation done")
def lambda_handler(event, context):
    """Evaluate one region of one account for publicly shared AMIs.

    Triggered by SNS: the message carries the account to scan and the list
    of regions still to process. This invocation pops one region, records
    public AMIs as open/whitelisted issues in DynamoDB, resolves issues
    that no longer apply, tracks API-request progress when applicable, and
    re-publishes the message while regions remain (self-chaining fan-out).
    """
    set_logging(level=logging.INFO)

    try:
        # SNS delivers a single record whose Message field is a JSON payload.
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        # get the last region from the list to process
        region = payload['regions'].pop()
        # if request_id is present in payload then this lambda was called from the API
        request_id = payload.get('request_id', None)
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        # The issue table lives in the main (hub) account.
        main_account = Account(region=config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            config.publicAMIs.ddb_table_name)

        # Assume the identification role in the target account/region.
        account = Account(id=account_id,
                          name=account_name,
                          region=region,
                          role_name=config.aws.role_name_identification)
        if account.session is None:
            # Role assumption failed; nothing can be scanned.
            return

        logging.debug(f"Checking for Public AMI issues for {account}")

        # existing open issues for account to check if resolved
        open_issues = IssueOperations.get_account_open_issues(
            ddb_table, account_id, PublicAMIIssue)
        # make dictionary for fast search by id
        # and filter by current region
        open_issues = {
            issue.issue_id: issue
            for issue in open_issues if issue.issue_details.region == region
        }
        logging.debug(f"Public AMIs in DDB:\n{open_issues.keys()}")

        checker = PublicAMIChecker(account=account)
        if checker.check():
            for ami in checker.amis:
                logging.debug(f"Checking {ami.id}")
                if ami.public_access:
                    issue = PublicAMIIssue(account_id, ami.id)
                    issue.issue_details.tags = ami.tags
                    issue.issue_details.name = ami.name
                    issue.issue_details.region = region
                    # Whitelisted AMIs are still recorded, just not marked open.
                    if config.publicAMIs.in_whitelist(account_id, ami.id):
                        issue.status = IssueStatus.Whitelisted
                    else:
                        issue.status = IssueStatus.Open
                    # Fixed log message: it previously printed
                    # "{ami.id}/{ami.id}" (the id twice) — copy-paste error.
                    logging.debug(
                        f"Setting {ami.name}/{ami.id} status {issue.status}")
                    IssueOperations.update(ddb_table, issue)
                    # remove issue id from issues_list_from_db (if exists)
                    # as we already checked it
                    open_issues.pop(ami.id, None)

            logging.debug(f"Public AMIs in DDB:\n{open_issues.keys()}")
            # all other unresolved issues in DDB are for removed/remediated AMIs
            for issue in open_issues.values():
                IssueOperations.set_status_resolved(ddb_table, issue)
        # track the progress of API request to scan specific account/region/feature
        if request_id:
            api_table = main_account.resource("dynamodb").Table(
                config.api.ddb_table_name)
            DDB.track_progress(api_table, request_id)
    except Exception:
        logging.exception(
            f"Failed to check AMI public access for '{account_id} ({account_name})'"
        )
        return

    # push SNS messages until the list with regions to check is empty
    if len(payload['regions']) > 0:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            logging.exception("Failed to chain public AMI checking")

    logging.debug(
        f"Checked AMI public access for '{account_id} ({account_name})'")
Example #8
0
def lambda_handler(event, context):
    """Evaluate one region of one account for publicly shared EBS snapshots.

    Triggered by SNS: the message carries the account to scan and the list
    of regions still to process. This invocation pops one region, records
    public snapshots as open/whitelisted issues in DynamoDB, resolves issues
    that are no longer present, and re-publishes the message while regions
    remain (self-chaining fan-out).
    """
    set_logging(level=logging.INFO)

    try:
        # SNS delivers a single record whose Message field is a JSON payload.
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        # get the last region from the list to process
        region = payload['regions'].pop()
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        # The issue table lives in the main (hub) account.
        main_account = Account(region=config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            config.ebsSnapshot.ddb_table_name)

        # Assume the identification role in the target account/region.
        account = Account(id=account_id,
                          name=account_name,
                          region=region,
                          role_name=config.aws.role_name_identification)
        if account.session is None:
            # Role assumption failed; nothing can be scanned.
            return

        logging.debug(f"Checking for public EBS snapshots in {account}")

        # existing open issues for account to check if resolved
        open_issues = IssueOperations.get_account_open_issues(
            ddb_table, account_id, EBSPublicSnapshotIssue)
        # make dictionary for fast search by id
        # and filter by current region
        open_issues = {
            issue.issue_id: issue
            for issue in open_issues if issue.issue_details.region == region
        }
        logging.debug(f"Public EBS snapshots in DDB:\n{open_issues.keys()}")

        checker = EBSPublicSnapshotsChecker(account=account)
        if checker.check():
            for snapshot in checker.snapshots:
                if snapshot.public:
                    issue = EBSPublicSnapshotIssue(account_id, snapshot.id)
                    issue.issue_details.region = snapshot.account.region
                    issue.issue_details.volume_id = snapshot.volume_id
                    issue.issue_details.tags = snapshot.tags
                    # Whitelisted snapshots are recorded but not marked open.
                    if config.ebsSnapshot.in_whitelist(account_id,
                                                       snapshot.id):
                        issue.status = IssueStatus.Whitelisted
                    else:
                        issue.status = IssueStatus.Open
                    logging.debug(
                        f"Setting {snapshot.id} status {issue.status}")
                    IssueOperations.update(ddb_table, issue)
                    # remove issue id from issues_list_from_db (if exists)
                    # as we already checked it
                    open_issues.pop(snapshot.id, None)

            logging.debug(
                f"Public EBS snapshots in DDB:\n{open_issues.keys()}")
            # all other unresolved issues in DDB are for removed/remediated EBS snapshots
            for issue in open_issues.values():
                IssueOperations.set_status_resolved(ddb_table, issue)
    except Exception:
        # NOTE(review): unlike the AMI handler, there is no `return` here, so
        # region chaining below continues even after a failed check — confirm
        # this is intentional.
        logging.exception(
            f"Failed to check public EBS snapshots in '{region}' for '{account_id} ({account_name})'"
        )

    # push SNS messages until the list with regions to check is empty
    if len(payload['regions']) > 0:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            logging.exception("Failed to chain public EBS snapshots checking")

    logging.debug(
        f"Checked public EBS snapshots in '{region}' for '{account_id} ({account_name})'"
    )
Example #9
0
def lambda_handler(event, context):
    """Evaluate one region of one account for security groups that expose
    restricted ports.

    Triggered by SNS: the message carries the account to scan and the list
    of regions still to process. This invocation pops one region, records
    unrestricted security groups (with each offending permission) as issues
    in DynamoDB, resolves issues no longer present, and re-publishes the
    message while regions remain (self-chaining fan-out).
    """
    set_logging(level=logging.INFO)

    try:
        # SNS delivers a single record whose Message field is a JSON payload.
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        # get the last region from the list to process
        region = payload['regions'].pop()
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        # The issue table lives in the main (hub) account.
        main_account = Account(region=config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            config.sg.ddb_table_name)

        # Assume the identification role in the target account/region.
        account = Account(id=account_id,
                          name=account_name,
                          region=region,
                          role_name=config.aws.role_name_identification)
        if account.session is None:
            # Role assumption failed; nothing can be scanned.
            return

        logging.debug(f"Checking for insecure services in {account}")

        # existing open issues for account to check if resolved
        open_issues = IssueOperations.get_account_open_issues(
            ddb_table, account_id, SecurityGroupIssue)
        # make dictionary for fast search by id
        # and filter by current region
        open_issues = {
            issue.issue_id: issue
            for issue in open_issues if issue.issue_details.region == region
        }
        logging.debug(f"Security groups in DDB:\n{open_issues.keys()}")

        # Restricted ports come from config; groups exposing them are flagged.
        checker = SecurityGroupsChecker(
            account=account, restricted_ports=config.sg.restricted_ports)
        if checker.check():
            for sg in checker.groups:
                logging.debug(f"Checking {sg.name} ({sg.id})")
                if not sg.restricted:
                    # TODO: move instances detection for security group from reporting to identification
                    #ec2_instances = EC2Operations.get_instance_details_of_sg_associated(account.client("ec2"), sg.id)
                    #logging.debug(f"associated ec2 instances: {ec2_instances}")
                    issue = SecurityGroupIssue(account_id, sg.id)
                    issue.issue_details.name = sg.name
                    issue.issue_details.region = sg.account.region
                    issue.issue_details.tags = sg.tags
                    issue.issue_details.status = sg.status.value
                    # Record every unrestricted CIDR range of every permission
                    # on the issue, so the report shows the exact exposure.
                    for perm in sg.permissions:
                        for ip_range in perm.ip_ranges:
                            if not ip_range.restricted:
                                issue.add_perm(perm.protocol, perm.from_port,
                                               perm.to_port, ip_range.cidr,
                                               ip_range.status)
                    # Whitelisting may match by group name or by group id.
                    if config.sg.in_whitelist(
                            account_id, sg.name) or config.sg.in_whitelist(
                                account_id, sg.id):
                        issue.status = IssueStatus.Whitelisted
                    else:
                        issue.status = IssueStatus.Open
                    logging.debug(f"Setting {sg.id} status {issue.status}")
                    IssueOperations.update(ddb_table, issue)
                    # remove issue id from issues_list_from_db (if exists)
                    # as we already checked it
                    open_issues.pop(sg.id, None)

            logging.debug(f"Security groups in DDB:\n{open_issues.keys()}")
            # all other unresolved issues in DDB are for removed/remediated security groups
            for issue in open_issues.values():
                IssueOperations.set_status_resolved(ddb_table, issue)
    except Exception:
        # No `return` here: region chaining below proceeds even after a
        # failed check, matching the EBS snapshot handler's behavior.
        logging.exception(
            f"Failed to check insecure services in '{region}' for '{account_id} ({account_name})'"
        )

    # push SNS messages until the list with regions to check is empty
    if len(payload['regions']) > 0:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            logging.exception("Failed to chain insecure services checking")

    logging.debug(
        f"Checked insecure services in '{region}' for '{account_id} ({account_name})'"
    )
Example #10
0
def start_scan(account_id, regions, security_features, tags, ids):
    """Validate an API scan request and fan it out via SNS.

    Args:
        account_id: AWS account id to scan (required).
        regions: regions to scan; an empty list means all supported regions.
        security_features: module section names to run; empty means all.
        tags: optional dict of resource tags to filter by.
        ids: optional list of resource ids to filter by.

    Returns:
        An API Gateway-style response dict: ``bad_request(...)`` on
        validation failure, otherwise HTTP 200 with the generated
        ``request_id`` for progress tracking.
    """
    config = Config()

    # Validate the required parameter before using it for lookups
    # (previously the account-name lookup ran first).
    if not account_id:
        return bad_request(text="account_id is required parameter")

    account_name = config.aws.accounts.get(account_id, None)
    if account_name is None:
        return bad_request(text=f"account '{account_id}' is not defined")

    valid_security_features = [module.section for module in config.modules]
    for security_feature in security_features:
        if security_feature not in valid_security_features:
            return bad_request(
                text=
                f"wrong security feature - '{security_feature}', available choices - {valid_security_features}"
            )

    # empty list means we want to scan all security features
    if not security_features:
        security_features = valid_security_features

    all_regions = config.aws.regions
    for region in regions:
        if region not in all_regions:
            # Fixed message: the closing quote after the region was missing.
            return bad_request(text=f"Region '{region}' is not supported")
    # empty list means we want to scan all supported regions
    if not regions:
        regions = all_regions

    if ids is not None and not isinstance(ids, list):
        return bad_request(text="'ids' parameter must be list")

    if tags is not None and not isinstance(tags, dict):
        return bad_request(text="'tags' parameter must be dict")

    # Progress-tracking table lives in the main (hub) account.
    main_account = Account(region=config.aws.region)
    api_table = main_account.resource("dynamodb").Table(
        config.api.ddb_table_name)

    # Keep only the features this account is actually configured for.
    to_scan = []
    for security_feature in security_features:
        accounts = config.get_module_config_by_name(security_feature).accounts
        if account_id in accounts:
            to_scan.append(security_feature)

    # Global features run once per account; regional ones once per region.
    regional_services = set(to_scan) - set(GLOBAL_SECURITY_FEATURES)
    global_services = set(to_scan).intersection(set(GLOBAL_SECURITY_FEATURES))
    total = len(regional_services) * len(regions) + len(global_services)

    # NOTE(review): 'ids' is validated above but not forwarded in
    # request_params — confirm whether it should be included.
    request_params = {
        "account_id": account_id,
        "regions": regions,
        "security_features": to_scan,
        "tags": tags
    }
    request_id = uuid.uuid4().hex

    # Register the request so downstream lambdas can track progress.
    DDB.add_request(api_table, request_id, request_params, total)

    for security_feature in to_scan:
        topic_name = config.get_module_config_by_name(
            security_feature).sns_topic_name
        topic_arn = get_sns_topic_arn(config, topic_name)
        payload = {
            "account_id": account_id,
            "account_name": account_name,
            "regions": regions,
            "sns_arn": topic_arn,
            "request_id": request_id
        }
        Sns.publish(topic_arn, payload)

    # `response` is always a dict here, so serialize unconditionally
    # (the old `isinstance` guard was dead code).
    response = {'request_id': request_id}

    return {
        "statusCode": 200,
        "body": json.dumps(response, indent=4)
    }
Example #11
0
def lambda_handler(event, context):
    """Check whether CloudTrail is enabled and delivering in one region of
    one account.

    Triggered by SNS: the message carries the account to scan and the list
    of regions still to process. This invocation pops one region, records a
    CloudTrail issue (keyed by region, not by resource id) when logging is
    disabled or has delivery errors, resolves a previously open issue when
    the region is now healthy, tracks API-request progress when applicable,
    and re-publishes the message while regions remain (self-chaining fan-out).
    """
    set_logging(level=logging.INFO)

    try:
        # SNS delivers a single record whose Message field is a JSON payload.
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        # get the last region from the list to process
        region = payload['regions'].pop()
        # if request_id is present in payload then this lambda was called from the API
        request_id = payload.get('request_id', None)
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        # The issue table lives in the main (hub) account.
        main_account = Account(region=config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(
            config.cloudtrails.ddb_table_name)

        # Assume the identification role in the target account/region.
        account = Account(id=account_id,
                          name=account_name,
                          region=region,
                          role_name=config.aws.role_name_identification)
        if account.session is None:
            # Role assumption failed; nothing can be scanned.
            return

        logging.debug(f"Checking for CloudTrail logging issues in {account}")

        # existing open issues for account to check if resolved
        open_issues = IssueOperations.get_account_open_issues(
            ddb_table, account_id, CloudTrailIssue)
        # make dictionary for fast search by id
        # and filter by current region
        # (CloudTrail issues use the region name itself as the issue id,
        # hence the `issue_id == region` filter below)
        open_issues = {
            issue.issue_id: issue
            for issue in open_issues if issue.issue_id == region
        }
        logging.debug(f"CloudTrail region in DDB:\n{open_issues.keys()}")

        checker = CloudTrailChecker(account=account)
        if checker.check():
            if checker.disabled or checker.delivery_errors:
                # One issue per region summarizing all trails found there.
                issue = CloudTrailIssue(account_id, region)
                issue.issue_details.disabled = checker.disabled
                issue.issue_details.delivery_errors = checker.delivery_errors
                issue.add_trails(checker.trails)
                # Whitelisting is per account+region for this check.
                if config.cloudtrails.in_whitelist(account_id, region):
                    issue.status = IssueStatus.Whitelisted
                else:
                    issue.status = IssueStatus.Open
                logging.debug(f"Setting {region} status {issue.status}")
                IssueOperations.update(ddb_table, issue)
            # issue exists in ddb and was fixed
            elif region in open_issues:
                IssueOperations.set_status_resolved(ddb_table,
                                                    open_issues[region])
        # track the progress of API request to scan specific account/region/feature
        if request_id:
            api_table = main_account.resource("dynamodb").Table(
                config.api.ddb_table_name)
            DDB.track_progress(api_table, request_id)
    except Exception:
        # No `return` here: region chaining below proceeds even after a
        # failed check.
        logging.exception(
            f"Failed to check CloudTrail in '{region}' for '{account_id} ({account_name})'"
        )

    # push SNS messages until the list with regions to check is empty
    if len(payload['regions']) > 0:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            logging.exception("Failed to chain CloudTrail checking")

    logging.debug(
        f"Checked CloudTrail in '{region}' for '{account_id} ({account_name})'"
    )
Example #12
0
def lambda_handler(event, context):
    """Evaluate one region of one account for publicly accessible SQS queue
    policies.

    Triggered by SNS: the message carries the account to scan and the list
    of regions still to process. This invocation pops one region, records
    queues with public policies as open/whitelisted issues in DynamoDB,
    resolves issues no longer present, and re-publishes the message while
    regions remain (self-chaining fan-out).
    """
    # Consistency fix: every sibling handler logs at INFO; DEBUG here
    # looked like a leftover from local debugging.
    set_logging(level=logging.INFO)

    try:
        # SNS delivers a single record whose Message field is a JSON payload.
        payload = json.loads(event["Records"][0]["Sns"]["Message"])
        account_id = payload['account_id']
        account_name = payload['account_name']
        # get the last region from the list to process
        region = payload['regions'].pop()
    except Exception:
        logging.exception(f"Failed to parse event\n{event}")
        return

    try:
        config = Config()

        # The issue table lives in the main (hub) account.
        main_account = Account(region=config.aws.region)
        ddb_table = main_account.resource("dynamodb").Table(config.sqspolicy.ddb_table_name)

        # Assume the identification role in the target account/region.
        account = Account(id=account_id,
                          name=account_name,
                          region=region,
                          role_name=config.aws.role_name_identification)
        if account.session is None:
            # Role assumption failed; nothing can be scanned.
            return

        logging.debug(f"Checking for public SQS policies in {account}")

        # existing open issues for account to check if resolved
        open_issues = IssueOperations.get_account_open_issues(ddb_table, account_id, SQSPolicyIssue)
        # make dictionary for fast search by id
        # and filter by current region
        open_issues = {issue.issue_id: issue for issue in open_issues if issue.issue_details.region == region}
        logging.debug(f"SQS in DDB:\n{open_issues.keys()}")

        checker = SQSPolicyChecker(account=account)
        if checker.check():
            for queue in checker.queues:
                logging.debug(f"Checking {queue.name}")
                if queue.public:
                    issue = SQSPolicyIssue(account_id, queue.url)
                    issue.issue_details.tags = queue.tags
                    issue.issue_details.name = queue.name
                    issue.issue_details.region = queue.account.region
                    issue.issue_details.policy = queue.policy
                    # Whitelisted queues are recorded but not marked open.
                    if config.sqspolicy.in_whitelist(account_id, queue.url):
                        issue.status = IssueStatus.Whitelisted
                    else:
                        issue.status = IssueStatus.Open
                    logging.debug(f"Setting {queue.name} status {issue.status}")
                    IssueOperations.update(ddb_table, issue)
                    # remove issue id from issues_list_from_db (if exists)
                    # as we already checked it
                    open_issues.pop(queue.url, None)

            # Bug fix: this cleanup now runs only when checker.check()
            # succeeded. Previously it ran unconditionally, so a failed
            # check wrongly resolved every open issue for the region
            # (all sibling handlers guard it this way).
            logging.debug(f"SQS in DDB:\n{open_issues.keys()}")
            # all other unresolved issues in DDB are for removed/remediated queues
            for issue in open_issues.values():
                IssueOperations.set_status_resolved(ddb_table, issue)
    except Exception:
        logging.exception(f"Failed to check SQS policies for '{account_id} ({account_name})'")
        return

    # push SNS messages until the list with regions to check is empty
    if len(payload['regions']) > 0:
        try:
            Sns.publish(payload["sns_arn"], payload)
        except Exception:
            # Fixed copy-paste: message previously said "insecure services".
            logging.exception("Failed to chain SQS policies checking")

    logging.debug(f"Checked SQS policies for '{account_id} ({account_name})'")