Example #1
def get_access_advisor(region, principal_stats, json_account_auth_details,
                       args):
    for principal_auth in [
            *json_account_auth_details['UserDetailList'],
            *json_account_auth_details['RoleDetailList']
    ]:
        stats = {}
        stats['auth'] = principal_auth
        job_id = get_parameter_file(region, 'iam',
                                    'generate-service-last-accessed-details',
                                    principal_auth['Arn'])['JobId']
        json_last_access_details = get_parameter_file(
            region, 'iam', 'get-service-last-accessed-details', job_id)
        stats['last_access'] = json_last_access_details

        stats['is_inactive'] = True

        job_completion_date = datetime.datetime.strptime(
            json_last_access_details['JobCompletionDate'][0:10], '%Y-%m-%d')

        for service in json_last_access_details['ServicesLastAccessed']:
            if 'LastAuthenticated' in service:
                last_access_date = datetime.datetime.strptime(
                    service['LastAuthenticated'][0:10], '%Y-%m-%d')
                service['days_since_last_use'] = (job_completion_date -
                                                  last_access_date).days
                if service['days_since_last_use'] < args.max_age:
                    stats['is_inactive'] = False
                    break

        principal_stats[principal_auth['Arn']] = stats
Example #2
def audit_s3_buckets(findings, region):
    buckets_json = query_aws(region.account, "s3-list-buckets", region)
    buckets = pyjq.all('.Buckets[].Name', buckets_json)
    for bucket in buckets:
        # Check policy
        try:
            policy_file_json = get_parameter_file(region, 's3',
                                                  'get-bucket-policy', bucket)
            if policy_file_json is not None:
                # Find the entity we need
                policy_string = policy_file_json['Policy']
                # Load the string value as json
                policy = json.loads(policy_string)
                policy = Policy(policy)
                if policy.is_internet_accessible():
                    if len(policy.statements) == 1 and len(
                            policy.statements[0].actions
                    ) == 1 and 's3:GetObject' in policy.statements[0].actions:
                        findings.add(
                            Finding(region, 'S3_PUBLIC_POLICY_GETOBJECT_ONLY',
                                    bucket))
                    else:
                        findings.add(
                            Finding(region,
                                    'S3_PUBLIC_POLICY',
                                    bucket,
                                    resource_details=policy_string))
                        #region, issue_id, resource_id, resource_details
        except Exception as e:
            raise e
        #findings.add(Finding(
        #        region,
        #        'EXCEPTION',
        #        bucket,
        #        resource_details={'policy': policy_string, 'exception': e, 'location': 'Exception checking policy of S3 bucket'}))
        # Check ACL
        try:
            file_json = get_parameter_file(region, 's3', 'get-bucket-acl',
                                           bucket)
            for grant in file_json['Grants']:
                uri = grant['Grantee'].get('URI', "")
                if (uri == 'http://acs.amazonaws.com/groups/global/AllUsers'
                        or uri ==
                        'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
                    ):
                    findings.add(
                        Finding(region,
                                'S3_PUBLIC_ACL',
                                bucket,
                                resource_details=grant))
        except Exception as e:
            findings.add(
                Finding(region,
                        'EXCEPTION',
                        bucket,
                        resource_details={
                            'grant': grant,
                            'exception': e,
                            'location': 'Exception checking ACL of S3 bucket'
                        }))
Example #3
def api_endpoints(accounts, config):
    for account in accounts:
        account = Account(None, account)
        for region_json in get_regions(account):
            region = Region(account, region_json)

            # Look for API Gateway
            json_blob = query_aws(region.account, 'apigateway-get-rest-apis',
                                  region)
            if json_blob is None:
                continue
            for api in json_blob.get('items', []):
                rest_id = api['id']
                deployments = get_parameter_file(region, 'apigateway',
                                                 'get-deployments', rest_id)
                if deployments is None:
                    continue
                for deployment in deployments['items']:
                    deployment_id = deployment['id']
                    stages = get_parameter_file(region, 'apigateway',
                                                'get-stages', rest_id)
                    if stages is None:
                        continue
                    for stage in stages['item']:
                        if stage['deploymentId'] == deployment_id:
                            resources = get_parameter_file(
                                region, 'apigateway', 'get-resources', rest_id)
                            if resources is None:
                                continue
                            for resource in resources['items']:
                                print('{}.execute-api.{}.amazonaws.com/{}{}'.
                                      format(api['id'], region.name,
                                             stage['stageName'],
                                             resource['path']))
Example #4
def get_access_advisor(region, principal_stats, json_account_auth_details,
                       args):
    for principal_auth in [
            *json_account_auth_details["UserDetailList"],
            *json_account_auth_details["RoleDetailList"],
    ]:
        stats = {}
        stats["auth"] = principal_auth
        job_id = get_parameter_file(
            region,
            "iam",
            "generate-service-last-accessed-details",
            principal_auth["Arn"],
        )["JobId"]
        json_last_access_details = get_parameter_file(
            region, "iam", "get-service-last-accessed-details", job_id)
        stats["last_access"] = json_last_access_details

        stats["is_inactive"] = True

        job_completion_date = datetime.datetime.strptime(
            json_last_access_details["JobCompletionDate"][0:10], "%Y-%m-%d")

        for service in json_last_access_details["ServicesLastAccessed"]:
            if "LastAuthenticated" in service:
                last_access_date = datetime.datetime.strptime(
                    service["LastAuthenticated"][0:10], "%Y-%m-%d")
                service["days_since_last_use"] = (job_completion_date -
                                                  last_access_date).days
                if service["days_since_last_use"] < args.max_age:
                    stats["is_inactive"] = False
                    break

        principal_stats[principal_auth["Arn"]] = stats
Example #5
def get_access_advisor_active_counts(account, max_age=90):
    region = get_us_east_1(account)

    json_account_auth_details = query_aws(region.account, "iam-get-account-authorization-details", region)

    account_stats = {'users': {'active': 0, 'inactive': 0}, 'roles': {'active': 0, 'inactive': 0}}
    for principal_auth in [*json_account_auth_details['UserDetailList'], *json_account_auth_details['RoleDetailList']]:
        stats = {}
        stats['auth'] = principal_auth

        principal_type = 'roles'
        if 'UserName' in principal_auth:
            principal_type = 'users'

        job_id = get_parameter_file(region, 'iam', 'generate-service-last-accessed-details', principal_auth['Arn'])['JobId']
        json_last_access_details = get_parameter_file(region, 'iam', 'get-service-last-accessed-details', job_id)
        stats['last_access'] = json_last_access_details

        stats['is_inactive'] = True

        job_completion_date = datetime.datetime.strptime(json_last_access_details['JobCompletionDate'][0:10], '%Y-%m-%d')

        for service in json_last_access_details['ServicesLastAccessed']:
            if 'LastAuthenticated' in service:
                last_access_date = datetime.datetime.strptime(service['LastAuthenticated'][0:10], '%Y-%m-%d')
                if (job_completion_date - last_access_date).days < max_age:
                    stats['is_inactive'] = False
                    break

        if stats['is_inactive']:
            account_stats[principal_type]['inactive'] += 1
        else:
            account_stats[principal_type]['active'] += 1

    return account_stats
Example #6
def get_access_advisor_active_counts(account, max_age=90):
    region = get_us_east_1(account)

    json_account_auth_details = query_aws(
        region.account, "iam-get-account-authorization-details", region)

    account_stats = {
        "users": {
            "active": 0,
            "inactive": 0
        },
        "roles": {
            "active": 0,
            "inactive": 0
        },
    }
    for principal_auth in [
            *json_account_auth_details["UserDetailList"],
            *json_account_auth_details["RoleDetailList"],
    ]:
        stats = {}
        stats["auth"] = principal_auth

        principal_type = "roles"
        if "UserName" in principal_auth:
            principal_type = "users"

        job_id = get_parameter_file(
            region,
            "iam",
            "generate-service-last-accessed-details",
            principal_auth["Arn"],
        )["JobId"]
        json_last_access_details = get_parameter_file(
            region, "iam", "get-service-last-accessed-details", job_id)
        stats["last_access"] = json_last_access_details

        stats["is_inactive"] = True

        job_completion_date = datetime.datetime.strptime(
            json_last_access_details["JobCompletionDate"][0:10], "%Y-%m-%d")

        for service in json_last_access_details["ServicesLastAccessed"]:
            if "LastAuthenticated" in service:
                last_access_date = datetime.datetime.strptime(
                    service["LastAuthenticated"][0:10], "%Y-%m-%d")
                if (job_completion_date - last_access_date).days < max_age:
                    stats["is_inactive"] = False
                    break

        if stats["is_inactive"]:
            account_stats[principal_type]["inactive"] += 1
        else:
            account_stats[principal_type]["active"] += 1

    return account_stats
Example #7
def audit_rds_snapshots(findings, region):
    json_blob = query_aws(region.account, "rds-describe-db-snapshots", region)
    for snapshot in json_blob.get("DBSnapshots", []):
        try:
            file_json = get_parameter_file(
                region,
                "rds",
                "describe-db-snapshot-attributes",
                snapshot["DBSnapshotIdentifier"],
            )
            for attribute in file_json["DBSnapshotAttributesResult"][
                    "DBSnapshotAttributes"]:
                if attribute["AttributeName"] == "restore":
                    if "all" in attribute["AttributeValues"]:
                        findings.add(
                            Finding(
                                region,
                                "RDS_PUBLIC_SNAPSHOT",
                                snapshot,
                                resource_details={
                                    "Entities allowed to restore":
                                    attribute["AttributeValues"]
                                },
                            ))
        except OSError:
            findings.add(
                Finding(
                    region,
                    "EXCEPTION",
                    None,
                    resource_details={
                        "location": "Could not open RDS snapshot file",
                        "file_name": file_name,
                    },
                ))
Example #8
def audit_es(findings, region):
    json_blob = query_aws(region.account, "es-list-domain-names", region)
    for domain in json_blob.get("DomainNames", []):
        name = domain["DomainName"]

        # Check policy
        policy_file_json = get_parameter_file(region, "es",
                                              "describe-elasticsearch-domain",
                                              name)
        # Find the entity we need
        policy_string = policy_file_json["DomainStatus"]["AccessPolicies"]
        # Load the string value as json
        policy = json.loads(policy_string)
        policy = Policy(policy)

        # ES clusters are either public, with an "Endpoint" (singular), which is bad, or
        # they are VPC-only, in which case they have an "Endpoints" (plural) array containing a "vpc" element
        if (policy_file_json["DomainStatus"].get("Endpoint", "") != ""
                or policy_file_json["DomainStatus"].get("Endpoints", {}).get(
                    "vpc", "") == ""):
            if policy.is_internet_accessible():
                findings.add(
                    Finding(region,
                            "ES_PUBLIC",
                            name,
                            resource_details=policy_string))
Example #9
def audit_glacier(findings, region):
    # Check for publicly accessible vaults.
    json_blob = query_aws(region.account, "glacier-list-vaults", region)
    if json_blob is None:
        # Service not supported in the region
        return

    for vault in json_blob.get("VaultList", []):
        name = vault["VaultName"]

        # Check policy
        policy_file_json = get_parameter_file(region, "glacier",
                                              "get-vault-access-policy", name)
        if policy_file_json is None:
            # No policy
            continue

        # Find the entity we need
        policy_string = policy_file_json["policy"]["Policy"]
        # Load the string value as json
        policy = json.loads(policy_string)
        policy = Policy(policy)
        if policy.is_internet_accessible():
            findings.add(
                Finding(region,
                        "GLACIER_PUBLIC",
                        name,
                        resource_details=policy_string))
Example #10
def audit_rds_snapshots(findings, region):
    json_blob = query_aws(region.account, "rds-describe-db-snapshots", region)
    for snapshot in json_blob.get('DBSnapshots', []):
        try:
            file_json = get_parameter_file(region, 'rds',
                                           'describe-db-snapshot-attributes',
                                           snapshot['DBSnapshotIdentifier'])
            for attribute in file_json['DBSnapshotAttributesResult'][
                    'DBSnapshotAttributes']:
                if attribute['AttributeName'] == 'restore':
                    if "all" in attribute['AttributeValues']:
                        findings.add(
                            Finding(region,
                                    'RDS_PUBLIC_SNAPSHOT',
                                    snapshot,
                                    resource_details={
                                        'Entities allowed to restore':
                                        attribute['AttributeValues']
                                    }))
        except OSError:
            findings.add(
                Finding(region,
                        'EXCEPTION',
                        None,
                        resource_details={
                            'location': 'Could not open RDS snapshot file',
                            'snapshot': snapshot['DBSnapshotIdentifier']
                        }))
Example #11
def audit_kms(findings, region):
    # Check for publicly accessible KMS keys.
    json_blob = query_aws(region.account, "kms-list-keys", region)
    if json_blob is None:
        # Service not supported in the region
        return

    for key in json_blob.get("Keys", []):
        name = key["KeyId"]

        # Check policy
        policy_file_json = get_parameter_file(region, "kms", "get-key-policy",
                                              name)
        if policy_file_json is None:
            # No policy
            continue

        # Find the entity we need
        policy_string = policy_file_json["Policy"]
        # Load the string value as json
        policy = json.loads(policy_string)
        policy = Policy(policy)
        if policy.is_internet_accessible():
            findings.add(
                Finding(region,
                        "KMS_PUBLIC",
                        name,
                        resource_details=policy_string))
Example #12
def audit_sqs(findings, region):
    # Check for publicly accessible sqs.
    json_blob = query_aws(region.account, "sqs-list-queues", region)
    if json_blob is None:
        # Service not supported in the region
        return

    for queue in json_blob.get("QueueUrls", []):
        queue_name = queue.split("/")[-1]
        # Check policy
        queue_attributes = get_parameter_file(region, "sqs",
                                              "get-queue-attributes", queue)
        if queue_attributes is None:
            # No policy
            continue

        # Find the entity we need
        attributes = queue_attributes["Attributes"]
        if "Policy" in attributes:
            policy_string = attributes["Policy"]
        else:
            # No policy set
            continue

        # Load the string value as json
        policy = json.loads(policy_string)
        policy = Policy(policy)
        if policy.is_internet_accessible():
            findings.add(
                Finding(region,
                        "SQS_PUBLIC",
                        queue_name,
                        resource_details=policy_string))
Example #13
def audit_sns(findings, region):
    # Check for publicly accessible sns.
    json_blob = query_aws(region.account, "sns-list-topics", region)
    if json_blob is None:
        # Service not supported in the region
        return

    for topic in json_blob.get("Topics", []):
        # Check policy
        attributes = get_parameter_file(region, "sns", "get-topic-attributes",
                                        topic["TopicArn"])
        if attributes is None:
            # No policy
            continue

        # Find the entity we need
        attributes = attributes["Attributes"]
        if "Policy" in attributes:
            policy_string = attributes["Policy"]
        else:
            # No policy set
            continue

        # Load the string value as json
        policy = json.loads(policy_string)
        policy = Policy(policy)
        if policy.is_internet_accessible():
            findings.add(
                Finding(
                    region,
                    "SNS_PUBLIC",
                    topic["TopicArn"],
                    resource_details=policy_string,
                ))
Example #14
def audit_ebs_snapshots(findings, region):
    json_blob = query_aws(region.account, "ec2-describe-snapshots", region)
    for snapshot in json_blob['Snapshots']:
        try:
            file_json = get_parameter_file(region, 'ec2', 'describe-snapshot-attribute', snapshot['SnapshotId'])
            if file_json is None:
                # Not technically an exception, but an unexpected situation
                findings.add(Finding(
                    region,
                    'EXCEPTION',
                    snapshot,
                    resource_details={'location': 'EBS snapshot has no attributes'}))
                continue
            for attribute in file_json['CreateVolumePermissions']:
                if attribute.get('Group', 'self') != 'self':
                    findings.add(Finding(
                        region,
                        'EBS_SNAPSHOT_PUBLIC',
                        snapshot,
                        resource_details={'Entities allowed to restore': attribute['Group']}))
        except OSError:
            findings.add(Finding(
                region,
                'EXCEPTION',
                None,
                resource_details={
                    'location': 'Could not open EBS snapshot file',
                    'snapshot': snapshot['SnapshotId']}))
Example #15
def tags(self):
    tags = get_parameter_file(self.region, 'elbv2', 'describe-tags', self._json_blob['LoadBalancerName'])
    if tags is None:
        return []
    descriptions = tags['TagDescriptions']
    if descriptions is None or len(descriptions) == 0:
        return []
    return descriptions[0]['Tags']
Example #16
def get_elasticsearch(region):
    es_domains = []
    domain_json = query_aws(region.account, "es-list-domain-names", region.region)
    domains = pyjq.all('.DomainNames[]', domain_json)
    for domain in domains:
        es = get_parameter_file(region, 'es', 'describe-elasticsearch-domain', domain['DomainName'])['DomainStatus']
        if 'VPCOptions' in es:
            es_domains.append(es)
    return es_domains
Example #17
def tags(self):
    tags = get_parameter_file(self.region, "elbv2", "describe-tags",
                              self._json_blob["LoadBalancerName"])
    if tags is None:
        return []
    descriptions = tags["TagDescriptions"]
    if descriptions is None or len(descriptions) == 0:
        return []
    return descriptions[0]["Tags"]
Example #18
def get_ecs_tasks(region):
    tasks = []
    clusters = query_aws(region.account, "ecs-list-clusters", region.region)
    for clusterArn in clusters.get('clusterArns', []):
        tasks_json = get_parameter_file(region, 'ecs', 'list-tasks', clusterArn)
        for taskArn in tasks_json['taskArns']:
            task_path = 'account-data/{}/{}/{}/{}/{}'.format(region.account.name, region.region.name, 'ecs-describe-tasks', urllib.parse.quote_plus(clusterArn), urllib.parse.quote_plus(taskArn))
            task = json.load(open(task_path))
            tasks.append(task['tasks'][0])
    return tasks
Example #19
def tags(self):
    tags = get_parameter_file(
        self.region,
        "rds",
        "list-tags-for-resource",
        self._json_blob["DBInstanceArn"],
    )
    if tags is None:
        return []
    return tags["TagList"]
Example #20
def get_elasticsearch(region):
    es_domains = []
    domain_json = query_aws(region.account, "es-list-domain-names",
                            region.region)
    domains = pyjq.all(".DomainNames[]", domain_json)
    for domain in domains:
        es = get_parameter_file(region, "es", "describe-elasticsearch-domain",
                                domain["DomainName"])["DomainStatus"]
        if "VPCOptions" in es:
            es_domains.append(es)
    return es_domains
Example #21
def get_account_stats(account, all_resources=False):
    """Returns stats for an account"""

    with open("stats_config.yaml", 'r') as f:
        resources = yaml.safe_load(f)

    account = Account(None, account)
    log_debug('Collecting stats in account {} ({})'.format(account.name, account.local_id))

    stats = {}
    stats['keys'] = []
    for resource in resources:
        # If the resource is marked as verbose, and we're not showing all resources, skip it.
        if resource.get('verbose',False) and not all_resources:
            continue
        stats['keys'].append(resource['name'])
        stats[resource['name']] = {}

    for region_json in get_regions(account):
        region = Region(account, region_json)

        for resource in resources:
            if resource.get('verbose',False) and not all_resources:
                continue

            # Skip global services (just CloudFront)
            if ('region' in resource) and (resource['region'] != region.name):
                continue

            # S3 buckets require special code to identify their location
            if resource['name'] == 'S3 buckets':
                if region.name == 'us-east-1':
                    buckets = pyjq.all('.Buckets[].Name', query_aws(region.account, 's3-list-buckets', region))
                    for bucket in buckets:
                        # Get the bucket's location
                        bucket_region = get_parameter_file(region, 's3', 'get-bucket-location', bucket)['LocationConstraint']

                        # Convert the value to a name.
                        # See https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
                        if bucket_region is None:
                            bucket_region = 'us-east-1'
                        elif bucket_region == 'EU':
                            bucket_region = 'eu-west-1'

                        # Increment the count
                        tmp = stats[resource['name']].get(bucket_region, 0)
                        stats[resource['name']][bucket_region] = tmp + 1
            else:
                # Normal path
                stats[resource['name']][region.name] = sum(pyjq.all(resource['query'],
                                                                    query_aws(region.account, resource['source'], region)))

    return stats
Example #22
def get_ecs_tasks(region):
    tasks = []
    clusters = query_aws(region.account, "ecs-list-clusters", region.region)
    for clusterArn in clusters.get("clusterArns", []):
        tasks_json = get_parameter_file(region, "ecs", "list-tasks",
                                        clusterArn)
        for i in range(0, len(tasks_json["taskArns"]) // 100):
            task_path = "account-data/{}/{}/{}/{}/{}".format(
                region.account.name, region.region.name, "ecs-describe-tasks",
                urllib.parse.quote_plus(clusterArn),
                urllib.parse.quote_plus(f"describe_tasks_{i}"))
            cluster_tasks = json.load(open(task_path))
            tasks += cluster_tasks["tasks"]
    return tasks
Example #23
def audit_guardduty(findings, region):
    detector_list_json = query_aws(region.account, "guardduty-list-detectors",
                                   region)
    if not detector_list_json:
        # GuardDuty must not exist in this region (or the collected data is old)
        return
    is_enabled = False
    for detector in detector_list_json["DetectorIds"]:
        detector_json = get_parameter_file(region, "guardduty", "get-detector",
                                           detector)
        if detector_json["Status"] == "ENABLED":
            is_enabled = True
    if not is_enabled:
        findings.add(Finding(region, "GUARDDUTY_OFF", None, None))
Example #24
def api_endpoints(accounts, config):
    for account in accounts:
        account = Account(None, account)
        for region_json in get_regions(account):
            region = Region(account, region_json)

            # Look for API Gateway
            json_blob = query_aws(region.account, "apigateway-get-rest-apis",
                                  region)
            if json_blob is None:
                continue
            for api in json_blob.get("items", []):
                rest_id = api["id"]
                deployments = get_parameter_file(region, "apigateway",
                                                 "get-deployments", rest_id)
                if deployments is None:
                    continue
                for deployment in deployments["items"]:
                    deployment_id = deployment["id"]
                    stages = get_parameter_file(region, "apigateway",
                                                "get-stages", rest_id)
                    if stages is None:
                        continue
                    for stage in stages["item"]:
                        if stage["deploymentId"] == deployment_id:
                            resources = get_parameter_file(
                                region, "apigateway", "get-resources", rest_id)
                            if resources is None:
                                continue
                            for resource in resources["items"]:
                                print("{}.execute-api.{}.amazonaws.com/{}{}".
                                      format(
                                          api["id"],
                                          region.name,
                                          stage["stageName"],
                                          resource["path"],
                                      ))
Example #25
def get_ecs_tasks(region):
    tasks = []
    clusters = query_aws(region.account, "ecs-list-clusters", region.region)
    for clusterArn in clusters.get("clusterArns", []):
        tasks_json = get_parameter_file(region, "ecs", "list-tasks", clusterArn)
        for taskArn in tasks_json["taskArns"]:
            task_path = "account-data/{}/{}/{}/{}/{}".format(
                region.account.name,
                region.region.name,
                "ecs-describe-tasks",
                urllib.parse.quote_plus(clusterArn),
                urllib.parse.quote_plus(taskArn),
            )
            task = json.load(open(task_path))
            tasks.append(task["tasks"][0])
    return tasks
Example #26
def audit_guardduty(findings, region):
    for region_json in get_regions(region.account):
        region = Region(region.account, region_json)
        detector_list_json = query_aws(region.account,
                                       "guardduty-list-detectors", region)
        if not detector_list_json:
            # GuardDuty must not exist in this region (or the collected data is old)
            continue
        is_enabled = False
        for detector in detector_list_json['DetectorIds']:
            detector_json = get_parameter_file(region, 'guardduty',
                                               'get-detector', detector)
            if detector_json['Status'] == 'ENABLED':
                is_enabled = True
        if not is_enabled:
            findings.add(Finding(region, 'GUARDDUTY_OFF', None, None))
Example #27
def audit_elbv2(findings, region):
    json_blob = query_aws(region.account, "elbv2-describe-load-balancers",
                          region)

    for load_balancer in json_blob.get("LoadBalancers", []):
        arn = load_balancer["LoadBalancerArn"]

        # Check attributes
        attributes_json = get_parameter_file(
            region, "elbv2", "describe-load-balancer-attributes", arn)

        for attribute in attributes_json.get("Attributes", []):
            if (attribute["Key"]
                    == "routing.http.drop_invalid_header_fields.enabled"
                    and attribute["Value"] == "false"):
                findings.add(Finding(region, "REQUEST_SMUGGLING", arn))
Example #28
def audit_s3_block_policy(findings, region):
    caller_identity_json = query_aws(region.account, "sts-get-caller-identity", region)
    block_policy_json = get_parameter_file(region, 's3control', 'get-public-access-block', caller_identity_json['Account'])
    if block_policy_json is None:
        findings.add(Finding(
            region,
            'S3_ACCESS_BLOCK_OFF',
            None))
    else:
        conf = block_policy_json['PublicAccessBlockConfiguration']
        if not conf['BlockPublicAcls'] or not conf['BlockPublicPolicy'] or not conf['IgnorePublicAcls'] or not conf['RestrictPublicBuckets']:
            findings.add(Finding(
                region,
                'S3_ACCESS_BLOCK_ALL_ACCESS_TYPES',
                None,
                resource_details=block_policy_json))
Example #29
def audit_elbv1(findings, region):
    json_blob = query_aws(region.account, "elb-describe-load-balancers",
                          region)

    for load_balancer in json_blob.get("LoadBalancerDescriptions", []):
        lb_name = load_balancer["LoadBalancerName"]

        # Check attributes
        attributes_json = get_parameter_file(
            region, "elb", "describe-load-balancer-attributes", lb_name)

        for attribute in attributes_json.get(
                "LoadBalancerAttributes", {}).get("AdditionalAttributes", []):
            if (attribute["Key"] == "elb.http.desyncmitigationmode"
                    and attribute["Value"] != "strictest"):
                findings.add(
                    Finding(region, "ELBV1_DESYNC_MITIGATION", lb_name))
Example #30
def audit_ebs_snapshots(findings, region):
    json_blob = query_aws(region.account, "ec2-describe-snapshots", region)
    for snapshot in json_blob["Snapshots"]:
        try:
            file_json = get_parameter_file(region, "ec2",
                                           "describe-snapshot-attribute",
                                           snapshot["SnapshotId"])
            if file_json is None:
                # Not technically an exception, but an unexpected situation
                findings.add(
                    Finding(
                        region,
                        "EXCEPTION",
                        snapshot,
                        resource_details={
                            "location": "EBS snapshot has no attributes"
                        },
                    ))
                continue
            for attribute in file_json["CreateVolumePermissions"]:
                if attribute.get("Group", "self") != "self":
                    findings.add(
                        Finding(
                            region,
                            "EBS_SNAPSHOT_PUBLIC",
                            snapshot,
                            resource_details={
                                "Entities allowed to restore":
                                attribute["Group"]
                            },
                        ))
        except OSError:
            findings.add(
                Finding(
                    region,
                    "EXCEPTION",
                    None,
                    resource_details={
                        "location": "Could not open EBS snapshot file",
                        "file_name": file_name,
                    },
                ))