Exemplo n.º 1
0
def audit_ebs_snapshots(findings, region):
    """Flag EBS snapshots whose createVolumePermission grants access outside the account.

    Adds an EBS_SNAPSHOT_PUBLIC finding for any snapshot sharing attribute whose
    Group is not 'self', and EXCEPTION findings for missing/unreadable attribute files.
    """
    json_blob = query_aws(region.account, "ec2-describe-snapshots", region)
    for snapshot in json_blob['Snapshots']:
        try:
            file_json = get_parameter_file(region, 'ec2', 'describe-snapshot-attribute', snapshot['SnapshotId'])
            if file_json is None:
                # Not technically an exception, but an unexpected situation
                findings.add(Finding(
                    region,
                    'EXCEPTION',
                    snapshot,
                    resource_details={'location': 'EBS snapshot has no attributes'}))
                continue
            for attribute in file_json['CreateVolumePermissions']:
                # 'self' means only this account can create volumes from the snapshot
                if attribute.get('Group', 'self') != 'self':
                    findings.add(Finding(
                        region,
                        'EBS_SNAPSHOT_PUBLIC',
                        snapshot,
                        resource_details={'Entities allowed to restore': attribute['Group']}))
        except OSError:
            # BUG FIX: the original referenced an undefined name `file_name` here,
            # which raised NameError instead of recording the finding. Use the
            # snapshot id, which identifies the file that failed to open.
            findings.add(Finding(
                region,
                'EXCEPTION',
                None,
                resource_details={
                    'location': 'Could not open EBS snapshot file',
                    'file_name': snapshot['SnapshotId']}))
Exemplo n.º 2
0
def audit_password_policy(findings, region):
    """Record findings when the account's IAM password policy is absent or weak."""
    json_blob = query_aws(region.account, "iam-get-account-password-policy", region)
    policy = (json_blob or {}).get('PasswordPolicy', {})
    if policy == {}:
        # No password policy configured at all.
        findings.add(Finding(
            region,
            'PASSWORD_POLICY_NOT_SET',
            None,
            None))
        return

    minimum_length = policy.get('MinimumPasswordLength', 0)
    if minimum_length < 12:
        findings.add(Finding(
            region,
            'PASSWORD_POLICY_CHARACTER_MINIMUM',
            None,
            resource_details={'MinimumPasswordLength': minimum_length}))

    # Collect every character-class requirement the policy fails to enforce.
    requirement_keys = (
        'RequireNumbers',
        'RequireSymbols',
        'RequireLowercaseCharacters',
        'RequireUppercaseCharacters',
    )
    lacking = [key for key in requirement_keys if not policy.get(key, False)]
    if lacking:
        findings.add(Finding(
            region,
            'PASSWORD_POLICY_CHARACTER_SET_REQUIREMENTS',
            None,
            resource_details={'Policy lacks': lacking}))
Exemplo n.º 3
0
def audit_glacier(findings, region):
    """Report Glacier vaults whose access policy makes them internet accessible."""
    vaults_json = query_aws(region.account, "glacier-list-vaults", region)
    if vaults_json is None:
        # Service not supported in the region
        return

    for vault in vaults_json.get("VaultList", []):
        vault_name = vault["VaultName"]

        # Fetch the vault's access policy, if one was collected.
        access_policy_json = get_parameter_file(
            region, "glacier", "get-vault-access-policy", vault_name
        )
        if access_policy_json is None:
            # No policy
            continue

        # The policy document is stored as a JSON string inside the response.
        policy_text = access_policy_json["policy"]["Policy"]
        vault_policy = Policy(json.loads(policy_text))
        if vault_policy.is_internet_accessible():
            findings.add(
                Finding(region, "GLACIER_PUBLIC", vault_name, resource_details=policy_text)
            )
Exemplo n.º 4
0
def api_endpoints(accounts, config):
    """Print the public execute-api URL of every deployed API Gateway resource."""
    for account_json in accounts:
        account = Account(None, account_json)
        for region_json in get_regions(account):
            region = Region(account, region_json)

            # Enumerate the REST APIs collected for this region.
            apis = query_aws(region.account, 'apigateway-get-rest-apis', region)
            if apis is None:
                continue
            for api in apis.get('items', []):
                rest_id = api['id']
                deployments = get_parameter_file(
                    region, 'apigateway', 'get-deployments', rest_id)
                if deployments is None:
                    continue
                for deployment in deployments['items']:
                    stages = get_parameter_file(
                        region, 'apigateway', 'get-stages', rest_id)
                    if stages is None:
                        continue
                    # Only stages bound to this deployment are live endpoints.
                    for stage in stages['item']:
                        if stage['deploymentId'] != deployment['id']:
                            continue
                        resources = get_parameter_file(
                            region, 'apigateway', 'get-resources', rest_id)
                        if resources is None:
                            continue
                        for resource in resources['items']:
                            print('{}.execute-api.{}.amazonaws.com/{}{}'.format(
                                rest_id, region.name,
                                stage['stageName'], resource['path']))
Exemplo n.º 5
0
def audit_redshift(findings, region):
    """Flag Redshift clusters that are publicly accessible."""
    json_blob = query_aws(region.account, "redshift-describe-clusters", region)
    if json_blob is None:
        # BUG FIX: guard added for consistency with the other audit_* checks —
        # when the service is unsupported in the region query_aws returns None
        # and `.get` on None would raise AttributeError.
        return
    for cluster in json_blob.get("Clusters", []):
        if cluster["PubliclyAccessible"]:
            findings.add(
                Finding(region, "REDSHIFT_PUBLIC_IP", cluster["ClusterIdentifier"])
            )
Exemplo n.º 6
0
def audit_es(findings, region):
    """Flag Elasticsearch domains that are public-facing with an open access policy."""
    json_blob = query_aws(region.account, "es-list-domain-names", region)
    if json_blob is None:
        # BUG FIX: guard added for consistency with the other audit_* checks —
        # unsupported regions return None and `.get` on None would raise.
        return
    for domain in json_blob.get("DomainNames", []):
        name = domain["DomainName"]

        # Check policy
        policy_file_json = get_parameter_file(
            region, "es", "describe-elasticsearch-domain", name
        )
        if policy_file_json is None:
            # BUG FIX: skip domains whose description was not collected; the
            # original would raise TypeError subscripting None.
            continue
        # Find the entity we need
        policy_string = policy_file_json["DomainStatus"]["AccessPolicies"]
        # Load the string value as json
        policy = json.loads(policy_string)
        policy = Policy(policy)

        # ES clusters are either public, with an "Endpoint" (singular), which is bad, or
        # they are VPC-only, in which case they have an "Endpoints" (plural) array containing a "vpc" element
        if (
            policy_file_json["DomainStatus"].get("Endpoint", "") != ""
            or policy_file_json["DomainStatus"].get("Endpoints", {}).get("vpc", "")
            == ""
        ):
            if policy.is_internet_accessible():
                findings.add(
                    Finding(region, "ES_PUBLIC", name, resource_details=policy_string)
                )
Exemplo n.º 7
0
def audit_sns(findings, region):
    """Report SNS topics whose attached policy is internet accessible."""
    json_blob = query_aws(region.account, "sns-list-topics", region)
    if json_blob is None:
        # Service not supported in the region
        return

    for topic in json_blob.get('Topics', []):
        topic_arn = topic['TopicArn']

        # Fetch the topic's attributes, if collected.
        attributes_json = get_parameter_file(region, 'sns', 'get-topic-attributes',
                                             topic_arn)
        if attributes_json is None:
            # No policy
            continue

        topic_attributes = attributes_json['Attributes']
        if 'Policy' not in topic_attributes:
            # No policy set
            continue
        policy_string = topic_attributes['Policy']

        # The policy is stored as a JSON string; parse and evaluate it.
        topic_policy = Policy(json.loads(policy_string))
        if topic_policy.is_internet_accessible():
            findings.add(
                Finding(region,
                        'SNS_PUBLIC',
                        topic_arn,
                        resource_details=policy_string))
Exemplo n.º 8
0
def audit_sqs(findings, region):
    """Report SQS queues whose policy makes them internet accessible."""
    json_blob = query_aws(region.account, "sqs-list-queues", region)
    if json_blob is None:
        # Service not supported in the region
        return

    for queue_url in json_blob.get("QueueUrls", []):
        # The queue name is the final path component of the URL.
        queue_name = queue_url.rsplit("/", 1)[-1]

        attributes_json = get_parameter_file(
            region, "sqs", "get-queue-attributes", queue_url
        )
        if attributes_json is None:
            # No policy
            continue

        queue_attributes = attributes_json["Attributes"]
        if "Policy" not in queue_attributes:
            # No policy set
            continue
        policy_string = queue_attributes["Policy"]

        # The policy is a JSON string; parse and evaluate it.
        queue_policy = Policy(json.loads(policy_string))
        if queue_policy.is_internet_accessible():
            findings.add(
                Finding(
                    region, "SQS_PUBLIC", queue_name, resource_details=policy_string
                )
            )
Exemplo n.º 9
0
def audit_rds_snapshots(findings, region):
    """Flag RDS snapshots whose 'restore' attribute allows access by everyone ("all")."""
    json_blob = query_aws(region.account, "rds-describe-db-snapshots", region)
    for snapshot in json_blob.get('DBSnapshots', []):
        try:
            file_json = get_parameter_file(region, 'rds',
                                           'describe-db-snapshot-attributes',
                                           snapshot['DBSnapshotIdentifier'])
            for attribute in file_json['DBSnapshotAttributesResult'][
                    'DBSnapshotAttributes']:
                # "all" in the restore attribute means the snapshot is public.
                if attribute['AttributeName'] == 'restore':
                    if "all" in attribute['AttributeValues']:
                        findings.add(
                            Finding(region,
                                    'RDS_PUBLIC_SNAPSHOT',
                                    snapshot,
                                    resource_details={
                                        'Entities allowed to restore':
                                        attribute['AttributeValues']
                                    }))
        except OSError:
            # BUG FIX: the original referenced an undefined name `file_name`,
            # raising NameError instead of recording the finding. Use the
            # snapshot identifier, which names the file that failed to open.
            findings.add(
                Finding(region,
                        'EXCEPTION',
                        None,
                        resource_details={
                            'location': 'Could not open RDS snapshot file',
                            'file_name': snapshot['DBSnapshotIdentifier']
                        }))
Exemplo n.º 10
0
def audit_rds_snapshots(findings, region):
    """Flag RDS snapshots whose 'restore' attribute allows access by everyone ("all")."""
    json_blob = query_aws(region.account, "rds-describe-db-snapshots", region)
    for snapshot in json_blob.get("DBSnapshots", []):
        try:
            file_json = get_parameter_file(
                region,
                "rds",
                "describe-db-snapshot-attributes",
                snapshot["DBSnapshotIdentifier"],
            )
            for attribute in file_json["DBSnapshotAttributesResult"][
                    "DBSnapshotAttributes"]:
                # "all" in the restore attribute means the snapshot is public.
                if attribute["AttributeName"] == "restore":
                    if "all" in attribute["AttributeValues"]:
                        findings.add(
                            Finding(
                                region,
                                "RDS_PUBLIC_SNAPSHOT",
                                snapshot,
                                resource_details={
                                    "Entities allowed to restore":
                                    attribute["AttributeValues"]
                                },
                            ))
        except OSError:
            # BUG FIX: the original referenced an undefined name `file_name`,
            # raising NameError instead of recording the finding. Use the
            # snapshot identifier, which names the file that failed to open.
            findings.add(
                Finding(
                    region,
                    "EXCEPTION",
                    None,
                    resource_details={
                        "location": "Could not open RDS snapshot file",
                        "file_name": snapshot["DBSnapshotIdentifier"],
                    },
                ))
Exemplo n.º 11
0
def audit_s3_buckets(findings, region):
    """Flag S3 buckets that are public via their bucket policy or via ACL grants."""
    buckets_json = query_aws(region.account, "s3-list-buckets", region)
    buckets = pyjq.all('.Buckets[].Name', buckets_json)
    for bucket in buckets:
        # Check policy.
        # BUG FIX: the original wrapped this in `except Exception as e: raise e`,
        # a no-op that only obscured the traceback; the handler was removed.
        policy_file_json = get_parameter_file(region, 's3',
                                              'get-bucket-policy', bucket)
        if policy_file_json is not None:
            # The policy document is stored as a JSON string.
            policy_string = policy_file_json['Policy']
            policy = Policy(json.loads(policy_string))
            if policy.is_internet_accessible():
                # A single-statement, GetObject-only policy is a deliberate
                # public website bucket; report it under a softer issue id.
                if (len(policy.statements) == 1
                        and len(policy.statements[0].actions) == 1
                        and 's3:GetObject' in policy.statements[0].actions):
                    findings.add(
                        Finding(region, 'S3_PUBLIC_POLICY_GETOBJECT_ONLY',
                                bucket))
                else:
                    findings.add(
                        Finding(region,
                                'S3_PUBLIC_POLICY',
                                bucket,
                                resource_details=policy_string))

        # Check ACL
        # BUG FIX: `grant` was referenced in the except handler below but is
        # unbound if get_parameter_file itself raises; initialize it first.
        grant = None
        try:
            file_json = get_parameter_file(region, 's3', 'get-bucket-acl',
                                           bucket)
            for grant in file_json['Grants']:
                uri = grant['Grantee'].get('URI', "")
                # These two grantee groups mean "everyone" / "any AWS account".
                if (uri == 'http://acs.amazonaws.com/groups/global/AllUsers'
                        or uri ==
                        'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
                    ):
                    findings.add(
                        Finding(region,
                                'S3_PUBLIC_ACL',
                                bucket,
                                resource_details=grant))
        except Exception as e:
            findings.add(
                Finding(region,
                        'EXCEPTION',
                        bucket,
                        resource_details={
                            'grant': grant,
                            'exception': e,
                            'location': 'Exception checking ACL of S3 bucket'
                        }))
Exemplo n.º 12
0
def get_access_advisor_active_counts(account, max_age=90):
    """Count active vs inactive IAM users and roles using Access Advisor data.

    A principal is "active" when any service was authenticated within `max_age`
    days of the Access Advisor job's completion date.
    """
    region = get_us_east_1(account)

    auth_details = query_aws(region.account, "iam-get-account-authorization-details", region)

    counts = {
        'users': {'active': 0, 'inactive': 0},
        'roles': {'active': 0, 'inactive': 0},
    }
    principals = list(auth_details['UserDetailList']) + list(auth_details['RoleDetailList'])
    for principal in principals:
        # Users carry a UserName key; everything else here is a role.
        principal_type = 'users' if 'UserName' in principal else 'roles'

        job_id = get_parameter_file(
            region, 'iam', 'generate-service-last-accessed-details',
            principal['Arn'])['JobId']
        last_access = get_parameter_file(
            region, 'iam', 'get-service-last-accessed-details', job_id)

        # Compare against the job's completion date (not "now") so results
        # are stable for previously collected data.
        completed = datetime.datetime.strptime(
            last_access['JobCompletionDate'][0:10], '%Y-%m-%d')

        is_active = False
        for service in last_access['ServicesLastAccessed']:
            if 'LastAuthenticated' not in service:
                continue
            accessed = datetime.datetime.strptime(
                service['LastAuthenticated'][0:10], '%Y-%m-%d')
            if (completed - accessed).days < max_age:
                is_active = True
                break

        counts[principal_type]['active' if is_active else 'inactive'] += 1

    return counts
Exemplo n.º 13
0
def audit_kms(findings, region):
    """Report KMS keys whose key policy is open to the internet."""
    keys_json = query_aws(region.account, "kms-list-keys", region)
    if keys_json is None:
        # Service not supported in the region
        return

    for key in keys_json.get("Keys", []):
        key_id = key["KeyId"]

        # Fetch the key policy, if one was collected.
        key_policy_json = get_parameter_file(region, "kms", "get-key-policy", key_id)
        if key_policy_json is None:
            # No policy
            continue

        # The policy document is a JSON string; parse and evaluate it.
        policy_string = key_policy_json["Policy"]
        key_policy = Policy(json.loads(policy_string))
        if key_policy.is_internet_accessible():
            findings.add(
                Finding(region, "KMS_PUBLIC", key_id, resource_details=policy_string)
            )
Exemplo n.º 14
0
def get_subnets(az):
    """Return the subnets of *az*'s VPC that sit in that availability zone."""
    subnets_json = query_aws(az.account, "ec2-describe-subnets", az.region)
    jq_filter = '.Subnets[] | select(.VpcId == "{}") | select(.AvailabilityZone == "{}")'
    # Filter by both the owning VPC and the AZ's local id.
    return pyjq.all(jq_filter.format(az.vpc.local_id, az.local_id), subnets_json)
Exemplo n.º 15
0
def audit_sg(findings, region):
    """Audit security-group ingress CIDRs for unneeded, suspicious,
    overlapping, or overly-large external ranges."""
    # TODO Check if security groups allow large CIDR range (ex. 1.2.3.4/3)
    # TODO Check if an SG restricts IPv4 and then opens IPv6 or vice versa.

    # Maps external CIDR string -> set of security group IDs that allow it.
    cidrs = {}
    sg_json = query_aws(region.account, 'ec2-describe-security-groups', region)
    sgs = pyjq.all('.SecurityGroups[]', sg_json)
    for sg in sgs:
        cidr_and_name_list = pyjq.all('.IpPermissions[].IpRanges[]|[.CidrIp,.Description]', sg)
        for cidr, name in cidr_and_name_list:
            # Only externally routable ranges are interesting for these checks.
            if not is_external_cidr(cidr):
                continue

            # Ranges that could never legitimately be a source (per the
            # project's is_unblockable_cidr helper) are flagged as unneeded.
            if is_unblockable_cidr(cidr):
                findings.add(Finding(
                    region,
                    'SG_CIDR_UNNEEDED',
                    sg['GroupId'],
                    resource_details={'cidr': cidr}))
                continue

            # 0.0.0.0/x with x != 0 is almost certainly a typo for 0.0.0.0/0.
            if cidr.startswith('0.0.0.0') and not cidr.endswith('/0'):
                findings.add(Finding(
                    region,
                    'SG_CIDR_UNEXPECTED',
                    sg['GroupId'],
                    resource_details={'cidr': cidr}))
                continue

            # Fully open rules are not aggregated for the size check below.
            if cidr == '0.0.0.0/0':
                continue

            # Record which security groups reference this external CIDR.
            cidrs[cidr] = cidrs.get(cidr, set())
            cidrs[cidr].add(sg['GroupId'])

        # Within each permission entry, flag any pair of CIDRs where one
        # range contains the other (redundant / likely misconfigured rules).
        for ip_permissions in sg['IpPermissions']:
            cidrs_seen = set()
            for ip_ranges in ip_permissions['IpRanges']:
                if 'CidrIp' not in ip_ranges:
                    continue
                cidr = ip_ranges['CidrIp']
                for cidr_seen in cidrs_seen:
                    if (IPNetwork(cidr_seen) in IPNetwork(cidr) or
                            IPNetwork(cidr) in IPNetwork(cidr_seen)):
                        findings.add(Finding(
                            region,
                            'SG_CIDR_OVERLAPS',
                            sg['GroupId'],
                            resource_details={'cidr1': cidr, 'cidr2': cidr_seen}))
                cidrs_seen.add(cidr)

    # Flag allowed external ranges covering more than 2048 addresses.
    for cidr in cidrs:
        ip = IPNetwork(cidr)
        if ip.size > 2048:
            findings.add(Finding(
                region,
                'SG_LARGE_CIDR',
                cidr,
                resource_details={'size': ip.size, 'security_groups': cidrs[cidr]}))
Exemplo n.º 16
0
def audit_iam_policies(findings, region):
    """Run check_for_bad_policy on the default version of every managed IAM policy."""
    auth_details = query_aws(region.account,
                             "iam-get-account-authorization-details", region)
    for policy in auth_details['Policies']:
        # Only the default version is in effect; skip historical versions.
        for version in policy['PolicyVersionList']:
            if not version['IsDefaultVersion']:
                continue
            check_for_bad_policy(findings, region, policy['Arn'],
                                 version['Document'])
Exemplo n.º 17
0
def audit_amis(findings, region):
    """Report AMIs in this region that are marked as publicly launchable."""
    images_json = query_aws(region.account, "ec2-describe-images", region)
    public_images = (image for image in images_json.get('Images', [])
                     if image['Public'])
    for image in public_images:
        findings.add(Finding(
            region,
            'AMI_PUBLIC',
            image['ImageId']))
Exemplo n.º 18
0
def get_vpcs(region, outputfilter):
    """Return the region's VPCs, optionally filtered by id ("vpc-ids")
    or by Name tag ("vpc-names") entries in *outputfilter*."""
    filter_parts = []
    if "vpc-ids" in outputfilter:
        filter_parts.append(" | select (.VpcId | contains({}))".format(outputfilter["vpc-ids"]))
    if "vpc-names" in outputfilter:
        filter_parts.append(' | select(.Tags != null) | select (.Tags[] | (.Key == "Name") and (.Value | contains({})))'.format(outputfilter["vpc-names"]))
    vpcs = query_aws(region.account, "ec2-describe-vpcs", region)
    # Compose the jq expression from the optional filter clauses.
    return pyjq.all('.Vpcs[]{}'.format("".join(filter_parts)), vpcs)
Exemplo n.º 19
0
def get_elasticsearch(region):
    """Return the DomainStatus of every VPC-based Elasticsearch domain in the region."""
    domain_json = query_aws(region.account, "es-list-domain-names", region.region)
    vpc_domains = []
    for domain in pyjq.all('.DomainNames[]', domain_json):
        status = get_parameter_file(
            region, 'es', 'describe-elasticsearch-domain',
            domain['DomainName'])['DomainStatus']
        # Only domains inside a VPC (they carry a VPCOptions block) are kept.
        if 'VPCOptions' in status:
            vpc_domains.append(status)
    return vpc_domains
Exemplo n.º 20
0
def get_access_advisor_active_counts(account, max_age=90):
    """Count active vs inactive IAM users and roles via Access Advisor data.

    A principal counts as active when any service shows a LastAuthenticated
    date within `max_age` days of the Access Advisor job's completion date.
    Returns {'users': {'active': n, 'inactive': n}, 'roles': {...}}.
    """
    region = get_us_east_1(account)

    json_account_auth_details = query_aws(
        region.account, "iam-get-account-authorization-details", region)

    account_stats = {
        "users": {
            "active": 0,
            "inactive": 0
        },
        "roles": {
            "active": 0,
            "inactive": 0
        },
    }
    # Users and roles are processed identically; merge both lists.
    for principal_auth in [
            *json_account_auth_details["UserDetailList"],
            *json_account_auth_details["RoleDetailList"],
    ]:
        stats = {}
        stats["auth"] = principal_auth

        # Only user records carry a UserName key.
        principal_type = "roles"
        if "UserName" in principal_auth:
            principal_type = "users"

        # Look up the Access Advisor job generated for this principal, then
        # fetch that job's results.
        job_id = get_parameter_file(
            region,
            "iam",
            "generate-service-last-accessed-details",
            principal_auth["Arn"],
        )["JobId"]
        json_last_access_details = get_parameter_file(
            region, "iam", "get-service-last-accessed-details", job_id)
        stats["last_access"] = json_last_access_details

        stats["is_inactive"] = True

        # Dates are compared against the job's completion time (not "now") so
        # results stay stable for previously collected data.
        job_completion_date = datetime.datetime.strptime(
            json_last_access_details["JobCompletionDate"][0:10], "%Y-%m-%d")

        for service in json_last_access_details["ServicesLastAccessed"]:
            if "LastAuthenticated" in service:
                last_access_date = datetime.datetime.strptime(
                    service["LastAuthenticated"][0:10], "%Y-%m-%d")
                if (job_completion_date - last_access_date).days < max_age:
                    stats["is_inactive"] = False
                    break

        if stats["is_inactive"]:
            account_stats[principal_type]["inactive"] += 1
        else:
            account_stats[principal_type]["active"] += 1

    return account_stats
Exemplo n.º 21
0
def get_regions(account, outputfilter=None):
    """Return the account's regions, optionally filtered by a "regions" entry.

    BUG FIX: the original used a mutable default argument (`outputfilter={}`).
    It is only read here, but the None-sentinel form removes the shared-state
    hazard and stays backward compatible.
    """
    if outputfilter is None:
        outputfilter = {}

    # aws ec2 describe-regions
    region_data = query_aws(account, "describe-regions")

    region_filter = ""
    if "regions" in outputfilter:
        region_filter = "| select(.RegionName | contains({}))".format(outputfilter["regions"])

    regions = pyjq.all('.Regions[]{}'.format(region_filter), region_data)
    return regions
Exemplo n.º 22
0
def get_ecs_tasks(region):
    """Return the first task description for every ECS task in every cluster of the region."""
    tasks = []
    clusters = query_aws(region.account, "ecs-list-clusters", region.region)
    for cluster_arn in clusters.get('clusterArns', []):
        tasks_json = get_parameter_file(region, 'ecs', 'list-tasks', cluster_arn)
        for task_arn in tasks_json['taskArns']:
            # ARNs contain '/' and ':' so they are percent-encoded in file names.
            task_path = 'account-data/{}/{}/{}/{}/{}'.format(
                region.account.name, region.region.name, 'ecs-describe-tasks',
                urllib.parse.quote_plus(cluster_arn),
                urllib.parse.quote_plus(task_arn))
            # BUG FIX: the original leaked the file handle via
            # json.load(open(task_path)); use a context manager instead.
            with open(task_path) as task_file:
                task = json.load(task_file)
            tasks.append(task['tasks'][0])
    return tasks
Exemplo n.º 23
0
def audit_route53(findings, region):
    """Audit Route53: domain renewal/transfer-lock settings, and hosted zones
    associated with the account's VPCs but owned by a foreign account."""
    json_blob = query_aws(region.account, "route53domains-list-domains",
                          region)
    for domain in json_blob.get("Domains", []):
        if not domain["AutoRenew"]:
            findings.add(
                Finding(region, "DOMAIN_NOT_SET_TO_RENEW",
                        domain["DomainName"], None))
        if not domain["TransferLock"]:
            findings.add(
                Finding(region, "DOMAIN_HAS_NO_TRANSFER_LOCK",
                        domain["DomainName"], None))

    # Check VPC hosted zones
    regions_json = query_aws(region.account, "describe-regions")
    regions = pyjq.all(".Regions[].RegionName", regions_json)
    for region_name in regions:
        vpc_json = query_aws(region.account, "ec2-describe-vpcs", region_name)
        # Only VPCs owned by this account.
        vpcs = pyjq.all(
            '.Vpcs[]? | select(.OwnerId=="{}").VpcId'.format(
                region.account.local_id),
            vpc_json,
        )
        for vpc in vpcs:
            hosted_zone_file = f"account-data/{region.account.name}/{region.name}/route53-list-hosted-zones-by-vpc/{region_name}/{vpc}"
            # BUG FIX: close the file handle (was json.load(open(...))).
            with open(hosted_zone_file) as f:
                hosted_zones_json = json.load(f)
            hosted_zones = pyjq.all(".HostedZoneSummaries[]?",
                                    hosted_zones_json)
            for hosted_zone in hosted_zones:
                owning_account = hosted_zone.get("Owner", {}).get(
                    "OwningAccount", "")
                if owning_account != "" and \
                        owning_account != region.account.local_id:
                    findings.add(
                        Finding(
                            region,
                            "FOREIGN_HOSTED_ZONE",
                            hosted_zone,
                            # BUG FIX: the keyword was misspelled
                            # `resource_datails`, which would raise TypeError
                            # when constructing the Finding.
                            resource_details={
                                "vpc_id": vpc,
                                "vpc_regions": region_name,
                            },
                        ))
Exemplo n.º 24
0
def audit_rds(findings, region):
    """Flag RDS instances with a public IP or running outside a VPC (EC2-Classic)."""
    instances_json = query_aws(region.account, "rds-describe-db-instances", region)
    for db in instances_json.get("DBInstances", []):
        identifier = db["DBInstanceIdentifier"]
        # Publicly routable database endpoint.
        if db["PubliclyAccessible"]:
            findings.add(Finding(region, "RDS_PUBLIC_IP", identifier))
        # A missing VpcId means the instance uses legacy EC2-Classic networking.
        if db.get("DBSubnetGroup", {}).get("VpcId", "") == "":
            findings.add(Finding(region, "RDS_VPC_CLASSIC", identifier))
Exemplo n.º 25
0
def get_elasticsearch(region):
    """Collect the DomainStatus of every Elasticsearch domain that lives in a VPC."""
    domain_json = query_aws(region.account, "es-list-domain-names",
                            region.region)
    # Fetch each domain's status, then keep only those with VPC options.
    statuses = [
        get_parameter_file(region, "es", "describe-elasticsearch-domain",
                           domain["DomainName"])["DomainStatus"]
        for domain in pyjq.all(".DomainNames[]", domain_json)
    ]
    return [status for status in statuses if "VPCOptions" in status]
Exemplo n.º 26
0
def audit_rds(findings, region):
    """Report RDS databases that are publicly reachable or not inside a VPC."""
    json_blob = query_aws(region.account, "rds-describe-db-instances", region)
    for instance in json_blob.get('DBInstances', []):
        db_id = instance['DBInstanceIdentifier']
        # Instance exposes a public endpoint.
        if instance['PubliclyAccessible']:
            findings.add(Finding(region, 'RDS_PUBLIC_IP', db_id))
        # No VpcId indicates legacy EC2-Classic networking.
        if instance.get('DBSubnetGroup', {}).get('VpcId', '') == '':
            findings.add(Finding(region, 'RDS_VPC_CLASSIC', db_id))
Exemplo n.º 27
0
 def security_groups(self):
     """Return the security group IDs attached to this task's network interfaces.

     Walks attachment details for networkInterfaceId entries, then matches
     them against the region's collected network interfaces.
     NOTE(review): ec2-describe-network-interfaces is re-queried for every
     ENI found — presumably cached by query_aws; confirm before optimizing.
     """
     sgs = []
     for detail in pyjq.all('.attachments[].details[]', self._json_blob):
         if detail['name'] == 'networkInterfaceId':
             eni = detail['value']
             interfaces_json = query_aws(self.account, 'ec2-describe-network-interfaces', self.region)
             for interface in interfaces_json['NetworkInterfaces']:
                 if interface['NetworkInterfaceId'] == eni:
                     # Collect every security group on the matching interface.
                     for group in interface['Groups']:
                         sgs.append(group['GroupId'])
     return sgs
Exemplo n.º 28
0
    def subnets(self):
        """Return the subnet IDs that actually contain this cluster's nodes.

        If a subnet was set explicitly (self._subnet) it is returned as-is.
        Otherwise the cluster's subnet group is resolved and narrowed to the
        subnets whose CIDR contains at least one cluster-node private IP.

        Raises:
            Exception: if the cluster's subnet group cannot be found.
        """
        if self._subnet:
            return self._subnet
        else:
            # Get the subnets that this cluster can be a part of
            cluster_subnet_group_name = self._json_blob[
                'ClusterSubnetGroupName']
            vpc_id = self._json_blob['VpcId']
            subnet_groups_json = query_aws(
                self.account, 'redshift-describe-cluster-subnet-groups',
                self.region)
            # Match the subnet group by both VPC id and group name.
            matched_subnet_group = {}
            for subnet_group in subnet_groups_json['ClusterSubnetGroups']:
                if vpc_id == subnet_group[
                        'VpcId'] and cluster_subnet_group_name == subnet_group[
                            'ClusterSubnetGroupName']:
                    matched_subnet_group = subnet_group
            if matched_subnet_group == {}:
                raise Exception("Could not find the subnet group")

            # Get the IDs of those subnets
            subnet_ids = []
            for subnet in matched_subnet_group['Subnets']:
                subnet_ids.append(subnet['SubnetIdentifier'])

            # Look through the subnets in the regions for ones that match,
            # then find those subnets that actually have the IPs for the cluster nodes in them
            subnets_with_cluster_nodes = []
            subnets = query_aws(self.account, 'ec2-describe-subnets',
                                self.region)
            for subnet in subnets['Subnets']:
                if subnet['SubnetId'] in subnet_ids:
                    # We have a subnet ID that we know the cluster can be part of, now check if there is actually a node there
                    for cluster_node in self._json_blob['ClusterNodes']:
                        if IPAddress(
                                cluster_node['PrivateIPAddress']) in IPNetwork(
                                    subnet['CidrBlock']):
                            subnets_with_cluster_nodes.append(
                                subnet['SubnetId'])

            return subnets_with_cluster_nodes
Exemplo n.º 29
0
def audit_route53(findings, region):
    """Flag registered domains that lack auto-renew or a transfer lock."""
    domains_json = query_aws(region.account, "route53domains-list-domains",
                             region)
    for domain in domains_json.get("Domains", []):
        domain_name = domain["DomainName"]
        # Domains without auto-renew risk expiring silently.
        if not domain["AutoRenew"]:
            findings.add(
                Finding(region, "DOMAIN_NOT_SET_TO_RENEW", domain_name, None))
        # A missing transfer lock leaves the domain open to hijack transfers.
        if not domain["TransferLock"]:
            findings.add(
                Finding(region, "DOMAIN_HAS_NO_TRANSFER_LOCK", domain_name,
                        None))
Exemplo n.º 30
0
def audit_accessanalyzer(findings, region):
    """Flag regions where IAM Access Analyzer has no ACTIVE analyzer."""
    analyzer_list_json = query_aws(region.account,
                                   "accessanalyzer-list-analyzers", region)
    if not analyzer_list_json:
        # Access Analyzer must not exist in this region (or the collect data is old)
        return
    has_active_analyzer = any(
        analyzer["status"] == "ACTIVE"
        for analyzer in analyzer_list_json["analyzers"])
    if not has_active_analyzer:
        findings.add(Finding(region, "ACCESSANALYZER_OFF", None, None))