Example #1
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate firewall rules with 0.0.0.0/0 source ingress.
    """

    # Integrates cloud logging handler to python logging
    create_logger()

    # Converting log to json
    data_buffer = b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    # Parse project id and resource name for allow/deny list checking and remediation
    resource_name = log_entry['protoPayload']['resourceName'].split('/')
    project_id = resource_name[1]
    firewall_name = resource_name[-1]

    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )
        check_resource(project_id, firewall_name)
    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )

    # This function uses roughly 133 MB of memory at the time of writing; without cleanup, high-activity projects may run into issues.
    del log_entry
    gc.collect()
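
The trigger expects the Cloud Logging entry base64-encoded under data['data']. A minimal sketch of how it could be exercised locally with a simulated Pub/Sub event, assuming pubsub_trigger and its helpers are importable and using a made-up firewall audit log shaped like the entries the function parses:

import base64
import json

# Hypothetical firewall audit log entry containing only the field pubsub_trigger reads.
fake_log = {
    'protoPayload': {
        'resourceName': 'projects/my-project/global/firewalls/allow-all-ingress'
    }
}

# Pub/Sub delivers the log entry base64-encoded under the 'data' key.
event = {'data': base64.b64encode(json.dumps(fake_log).encode('utf-8'))}

# The context argument is unused by the function, so None is enough for a local run.
pubsub_trigger(event, None)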
Example #2
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate bucket for public access and remediate if public access exists.
    """

    # Integrates cloud logging handler to python logging
    create_logger()
    logging.info(
        'Received GCS permissions update log from Pub/Sub. Checking for public access.'
    )

    # Determine if CFN is running in view-only mode
    try:
        mode = getenv('MODE')
    except:
        logging.error('Mode not found in environment variable.')

    # Converting log to json
    data_buffer = base64.b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    # Setting GCS bucket name and project ID variables from log
    bucket_name = log_entry['resource']['labels']['bucket_name']
    project_id = log_entry['resource']['labels']['project_id']

    # Check our project_id against the project list set at deployment
    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )
        # Configuring storage client
        storage_client = storage.Client()
        bucket = storage_client.bucket(bucket_name)

        # Get the current GCS bucket policy
        try:
            policy = bucket.get_iam_policy()
        except:
            logging.error(f'Could not view bucket: {bucket_name} IAM policy.')

        # Evaluating GCS bucket policy for public bindings
        eval_bucket(bucket_name, policy, bucket, project_id, mode)
    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )
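
Every example gates evaluation on check_list(project_id), which returns True when the project should still be evaluated. A minimal sketch of what such a helper could look like, assuming hypothetical ALLOWLIST/DENYLIST environment variables holding comma-separated project IDs; the project's actual implementation may differ:

from os import getenv

def check_list(project_id):
    """Return True if the project should be evaluated, False if it is exempt."""
    # Hypothetical comma-separated project lists set at deployment time.
    allowlist = [p for p in getenv('ALLOWLIST', '').split(',') if p]
    denylist = [p for p in getenv('DENYLIST', '').split(',') if p]

    if allowlist:
        # Projects on the allowlist are exempt from evaluation.
        return project_id not in allowlist
    if denylist:
        # Only projects on the denylist are evaluated.
        return project_id in denylist
    # Neither list is fully configured: evaluate everything.
    return True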
Example #3
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate the BigQuery Table for public members.
    """

    # Integrates cloud logging handler to python logging
    create_logger()
    logging.info(
        'Received BigQuery table permissions update log from Pub/Sub. Checking for public access.'
    )

    # Determine if CFN is running in view-only mode
    try:
        mode = getenv('MODE')
    except:
        logging.error('Mode not found in environment variable.')

    # Determine alerting Pub/Sub topic
    try:
        topic_id = getenv('TOPIC_ID')
    except:
        logging.error('Topic ID not found in environment variable.')

    # Create BigQuery client
    client = bigquery.Client()

    # Converting log to json
    data_buffer = base64.b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    # Get table resource ID from log entry
    table_id = log_entry['protoPayload']['serviceData']['setIamPolicyRequest'][
        'resource']
    project_id = log_entry['resource']['labels']['project_id']

    # Check our project_id against the project list set at deployment
    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )

        # Create the fully-qualified table ID in standard SQL format
        # Split the table_id into a list of strings
        table_list = table_id.split('/')
        # Set our list of strings to remove
        remove_strings = ["projects", "datasets", "tables"]
        # Remove all strings in list to create a list of table_id values
        table_id = [i for i in table_list if i not in remove_strings]
        # Create the fully-qualified table ID in standard SQL format using the leftover values
        table_id = '.'.join(table_id)

        # Get the table ref
        table_ref = client.get_table(table_id)

        # Get the current BigQuery Table Policy
        table_policy = get_table_policy(client, table_ref)

        # Generate a new policy without public members
        new_policy = validate_table_policy(table_policy, table_id)

        if new_policy:
            logging.info(f'Found public members on BQ table: {table_id}.')

            finding_type = "public_bigquery_table"
            # Set our pub/sub message
            message = f"Found public members on BigQuery table: {table_id} in project: {project_id}."
            # Publish message to Pub/Sub
            logging.info('Publishing message to Pub/Sub.')
            try:
                publish_message(finding_type, mode, table_id, project_id,
                                message, topic_id)
                logging.info(f'Published message to {topic_id}')
            except:
                logging.error(f'Could not publish message to {topic_id}')
                raise
            if mode == "write":
                logging.info(
                    f'Lockdown is in write mode. Updating BigQuery table: {table_id} with new table policy.'
                )
                # Updates BQ table with private table policy
                update_table_policy(new_policy, client, table_ref, table_id)
            if mode == "read":
                logging.info(
                    'Lockdown is in read-only mode. Taking no action.')
        else:
            logging.info(
                f"The BigQuery Table: {table_id} is not public facing.")
    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )
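
As a quick illustration of the resource-name parsing above, converting the audit-log resource path into a standard SQL table ID behaves as follows (the path is made up):

table_id = 'projects/my-project/datasets/my_dataset/tables/my_table'
remove_strings = ["projects", "datasets", "tables"]
table_id = '.'.join(i for i in table_id.split('/') if i not in remove_strings)
print(table_id)  # my-project.my_dataset.my_table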
Example #4
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate the compute image IAM policy for public members.
    """

    # Integrates cloud logging handler to python logging
    create_logger()
    logging.info(
        'Received compute image IAM update log from Pub/Sub. Checking for public members.'
    )

    # Determine if CFN is running in view-only mode
    try:
        mode = getenv('MODE')
    except:
        logging.error('Mode not found in environment variable.')

    # Determine alerting Pub/Sub topic
    try:
        topic_id = getenv('TOPIC_ID')
    except:
        logging.error('Topic ID not found in environment variable.')

    # Create compute client to make API calls
    compute_client = create_service()

    # Converting log to json
    data_buffer = base64.b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    # Get image ID from log event
    image_id = log_entry['protoPayload']['resourceName']

    # Create the image resource ID
    # Split the image_id into a list of strings
    image_list = image_id.split('/')
    # Set our list of strings to remove
    remove_strings = ["projects", "global", "images"]
    # Remove all strings in list to create a list project_id and image_id
    project_image_id = [i for i in image_list if i not in remove_strings]
    # Create the project_id and image_id variables
    project_id = project_image_id[0]
    image_id = project_image_id[1]

    # Check our project_id against the project list set at deployment
    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )

        # Get the compute image IAM policy
        policy = get_iam_policy(compute_client, image_id, project_id)

        # Generate a new policy without public members
        new_policy = eval_iam_policy(policy, image_id, project_id)

        if new_policy:
            finding_type = "public_gce_image"
            # Set our pub/sub message
            message = f"Found public members on GCE image: {image_id} in project: {project_id}."
            # Publish message to Pub/Sub
            logging.info('Publishing message to Pub/Sub.')
            try:
                publish_message(finding_type, mode, image_id, project_id,
                                message, topic_id)
                logging.info(f'Published message to {topic_id}')
            except:
                logging.error(f'Could not publish message to {topic_id}')
                raise
            if mode == "write":
                logging.info(
                    f"Lockdown is in write mode. Updating compute image: {image_id} with new IAM policy."
                )
                # Updates compute image with private IAM policy
                set_iam_policy(new_policy, compute_client, image_id,
                               project_id)
            if mode == "read":
                logging.info(
                    'Lockdown is in read-only mode. Taking no action.')
        else:
            logging.info(
                f"The compute image: {image_id} is not public facing.")
    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )
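
eval_iam_policy is expected to return a rewritten policy only when public principals are present. A minimal sketch of that logic, assuming the policy from get_iam_policy is the usual dict with a 'bindings' list; this is a hypothetical stand-in, not the project's actual helper:

PUBLIC_MEMBERS = {"allUsers", "allAuthenticatedUsers"}

def eval_iam_policy(policy, image_id, project_id):
    """Return the policy stripped of public members, or None if none were found."""
    found_public = False
    for binding in policy.get("bindings", []):
        members = binding.get("members", [])
        cleaned = [m for m in members if m not in PUBLIC_MEMBERS]
        if len(cleaned) != len(members):
            found_public = True
            binding["members"] = cleaned
    return policy if found_public else None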
Example #5
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate the GCE instance's service account.
    """

    # Integrates cloud logging handler to python logging
    create_logger()
    logging.info(
        'Received GCE instance creation log from Pub/Sub. Checking for default service account.'
    )

    # Determine if CFN is running in view-only mode
    try:
        mode = getenv('MODE')
    except:
        logging.error('Mode not found in environment variable.')

    # Determine our Pub/Sub alerting topic
    try:
        topic_id = getenv('TOPIC_ID')
    except:
        logging.error('Topic ID not found in environment variable.')

    # Create compute client to make API calls
    compute_client = create_service()

    # Converting log to json
    data_buffer = base64.b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    # Get the required API parameters from the log event
    # instance_id contains our project, zone, and instance_name (all needed to make API calls)
    instance_id = log_entry['protoPayload']['resourceName']

    # Split the instance_id into a list of strings
    instance_list = instance_id.split('/')
    # Set our list of strings to remove
    remove_strings = ["projects", "zones", "instances"]
    # Remove all strings in the list to get our variables
    project_zone_instance = [
        i for i in instance_list if i not in remove_strings
    ]
    # Create the variables needed for API calls
    project_id = project_zone_instance[0]
    zone = project_zone_instance[1]
    instance_name = project_zone_instance[2]

    # Check our project_id against the project list set at deployment
    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )

        # Retrieves the GCE metadata.
        gce_info = get_gce_info(compute_client, instance_name, zone,
                                project_id)

        # Determine the service account assigned to the GCE instance.
        gce_sa = eval_gce_info(gce_info, instance_name, project_id)

        if gce_sa:
            finding_type = "gce_default_sa"
            # Set our pub/sub message
            message = f"The GCE instance: {instance_name} in project: {project_id} is using the default compute service account."
            # Publish message to Pub/Sub
            logging.info('Publishing message to Pub/Sub.')
            try:
                publish_message(finding_type, mode, instance_name, project_id,
                                message, topic_id)
                logging.info(f'Published message to {topic_id}')
            except:
                logging.error(f'Could not publish message to {topic_id}')
                raise
            if mode == "write":
                logging.info(
                    f"Lockdown is in write mode. Stopping the GCE instance {instance_name} in project {project_id}."
                )
                stop_gce_instance(compute_client, instance_name, zone,
                                  project_id)
            if mode == "read":
                logging.info(
                    f"Lockdown is in read-only mode. Taking no action on the GCE instance: {instance_name} in project {project_id} for using the default compute service account. "
                )
    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )
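
A minimal sketch of how eval_gce_info could decide whether the instance runs as the default compute service account, assuming gce_info is the instances.get response dict; this is a hypothetical stand-in for the project's helper:

import logging

def eval_gce_info(gce_info, instance_name, project_id):
    """Return the service account email if it is the default compute SA, otherwise None."""
    for sa in gce_info.get("serviceAccounts", []):
        email = sa.get("email", "")
        # The default compute SA is <project-number>-compute@developer.gserviceaccount.com.
        if email.endswith("-compute@developer.gserviceaccount.com"):
            logging.info(
                f"GCE instance: {instance_name} in project: {project_id} uses the default SA {email}."
            )
            return email
    return None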
Example #6
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate the BigQuery Dataset for public members.
    """

    # Integrates cloud logging handler to python logging
    create_logger()
    logging.info(
        'Received BigQuery dataset permissions update log from Pub/Sub. Checking for public access.'
    )

    # Determine if CFN is running in view-only mode
    try:
        mode = getenv('MODE')
    except:
        logging.error('Mode not found in environment variable.')

    # Determine alerting Pub/Sub topic
    try:
        topic_id = getenv('TOPIC_ID')
    except:
        logging.error('Topic ID not found in environment variable.')

    # Create BigQuery client
    client = bigquery.Client()

    # Converting log to json
    data_buffer = base64.b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    # Get dataset ID and project ID from log event
    dataset_log = log_entry['resource']['labels']['dataset_id']
    project_id = log_entry['resource']['labels']['project_id']

    # Check our project_id against the project list set at deployment
    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )

        # Create the fully-qualified dataset ID in standard SQL format
        data_vars = [project_id, dataset_log]
        dataset_id = '.'.join(data_vars)

        try:
            # Create Dataset object
            dataset = client.get_dataset(dataset_id)
        except:
            logging.info(f'Could not access BigQuery Dataset {dataset_id}')

        # Evaluate the dataset
        private_access_entry = eval_dataset(dataset, dataset_log, project_id)

        if private_access_entry:
            finding_type = "public_bigquery_dataset"
            # Set our pub/sub message
            message = f"Found public members on bigquery dataset: {dataset_log} in project: {project_id}."
            # Publish message to Pub/Sub
            logging.info('Publishing message to Pub/Sub.')
            try:
                publish_message(finding_type, mode, dataset_log, project_id,
                                message, topic_id)
                logging.info(f'Published message to {topic_id}')
            except:
                logging.error(f'Could not publish message to {topic_id}')
                raise
            # if the function is running in "write" mode, remove public members
            if mode == "write":
                logging.info(
                    'Lockdown is in write mode. Removing public IAM members from dataset.'
                )
                # Remove public members
                update_dataset(client, private_access_entry, dataset_id,
                               dataset)
            # if function is in read mode, take no action and publish message to pub/sub
            if mode == "read":
                logging.info(
                    'Lockdown is in read-only mode. Taking no action.')
        else:
            logging.info(
                f'No public members found. Taking no action on BigQuery dataset: {dataset_id}'
            )
    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )
Example #7
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate the SSL policy for a weak TLS.
    """

    # Integrates cloud logging handler to python logging
    create_logger()
    logging.info(
        'Received SSL policy log from Pub/Sub. Checking for weak TLS.')

    # Determine if CFN is running in view-only mode
    try:
        mode = getenv('MODE')
    except:
        logging.error('Mode not found in environment variable.')

    # Determine alerting Pub/Sub topic
    try:
        topic_id = getenv('TOPIC_ID')
    except:
        logging.error('Topic ID not found in environment variable.')

    # Create compute client to make API calls
    compute_client = create_service()

    # Converting log to json
    data_buffer = base64.b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    ssl_policy = log_entry['protoPayload']['resourceName']

    # Create the ssl policy resource ID
    # Split the ssl_policy into a list of strings
    ssl_list = ssl_policy.split('/')
    # Set our list of strings to remove
    remove_strings = ["projects", "global", "sslPolicies"]
    # Remove all strings in list to create a list project_id and ssl_policy
    ssl_project_id = [i for i in ssl_list if i not in remove_strings]
    # Create the project_id and ssl_policy variables
    project_id = ssl_project_id[0]
    ssl_policy = ssl_project_id[1]

    # Check our project_id against the project list set at deployment
    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )

        # Capture the SSL Policy's metadata
        ssl_description = get_ssl_policy(project_id, ssl_policy,
                                         compute_client)

        # Only return the tls_version variable if it is TLS 1.0 (weak)
        tls_version = analyze_ssl_policy(ssl_description, project_id,
                                         compute_client, ssl_policy)

        # if the variable tls_version exists, update to TLS 1.1
        if tls_version:
            finding_type = "weak_ssl_policy"
            # Set our pub/sub message
            message = f"Found weak TLS 1.0 on SSL policy: {ssl_policy} in project: {project_id}."
            # Publish message to Pub/Sub
            logging.info('Publishing message to Pub/Sub.')
            try:
                publish_message(finding_type, mode, ssl_policy, project_id,
                                message, topic_id)
                logging.info(f'Published message to {topic_id}')
            except:
                logging.error(f'Could not publish message to {topic_id}')
                raise
            if mode == "write":
                logging.info(
                    f'Lockdown is in write mode. Updating SSL policy: {ssl_policy} with TLS 1.1.'
                )
                update_ssl_policy(compute_client, ssl_description, project_id,
                                  ssl_policy)
            if mode == "read":
                logging.info(
                    'Lockdown is in read-only mode. Taking no action.')
        else:
            logging.info(f"The SSL policy {ssl_policy} is not using weak TLS.")
    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )
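
A minimal sketch of the check analyze_ssl_policy performs, assuming ssl_description is the sslPolicies.get response dict and that, as the comments above state, only TLS 1.0 is treated as weak; hypothetical stand-in for the project's helper:

def analyze_ssl_policy(ssl_description, project_id, compute_client, ssl_policy):
    """Return the minimum TLS version only if it is the weak TLS 1.0 setting."""
    tls_version = ssl_description.get("minTlsVersion")
    if tls_version == "TLS_1_0":
        return tls_version
    return None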
Example #8
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate the GKE cluster for legacy ABAC.
    """

    # Integrates cloud logging handler to python logging
    create_logger()
    logging.info(
        'Received GKE cluster log from Pub/Sub. Checking for legacy ABAC.')

    # Determine if CFN is running in view-only mode
    try:
        mode = getenv('MODE')
    except:
        logging.error('Mode not found in environment variable.')

    # Determine alerting Pub/Sub topic
    try:
        topic_id = getenv('TOPIC_ID')
    except:
        logging.error('Topic ID not found in environment variable.')

    # Create our GKE client
    container_client = container.ClusterManagerClient()

    # Converting log to json
    data_buffer = base64.b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    # Set our Cloud Logging variables
    cluster_id = log_entry['protoPayload']['resourceName']
    project_id = log_entry['resource']['labels']['project_id']
    cluster_name = log_entry['resource']['labels']['cluster_name']
    api_action = log_entry['protoPayload']['methodName']

    # Check our project_id against the project list set at deployment
    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )

        # There are two different log events this function can be triggered with.
        # Initialize abac_value so the check below is safe if neither event type matches.
        abac_value = None

        # The first is the creation of a cluster, in which case we need to fetch the cluster's data.
        if api_action == "google.container.v1beta1.ClusterManager.CreateCluster":
            # Get cluster details to begin evaluation logic
            cluster_details = get_cluster_details(container_client, cluster_id)
            # Check to see if legacy ABAC is enabled
            abac_value = check_legacy_abac(cluster_details, cluster_id)
        # The other log event is an update to a cluster's legacy auth setting.
        # Enabled or disabled can be read from the log event itself, so there is no need to fetch cluster details.
        # This Cloud Function shouldn't be invoked if the API event disabled legacy ABAC,
        # so the value below is always "enabled" unless the log sink query is changed.
        # This check lets us skip fetching the cluster's data and go straight to disable_legacy_abac().
        if api_action == "google.container.v1.ClusterManager.SetLegacyAbac":
            logging.info(
                f"GKE cluster: {cluster_name} was updated to enable legacy ABAC."
            )
            abac_value = "enabled"

        if abac_value:
            finding_type = "gke_legacy_abac"
            # Set our pub/sub message
            message = f"GKE cluster: {cluster_id} has legacy ABAC enabled."
            # Publish message to Pub/Sub
            logging.info('Publishing message to Pub/Sub.')
            try:
                publish_message(finding_type, mode, cluster_name, project_id,
                                message, topic_id)
                logging.info(f'Published message to {topic_id}')
            except:
                logging.error(f'Could not publish message to {topic_id}')
                raise
            if mode == "write":
                logging.info("Lockdown is in write mode.")
                # update GKE cluster
                disable_legacy_abac(container_client, cluster_id)
            if mode == "read":
                logging.info(
                    'Lockdown is in read-only mode. Taking no action.')
    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )
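
A minimal sketch of what check_legacy_abac could return, assuming cluster_details is the Cluster object returned by ClusterManagerClient.get_cluster, which exposes a legacy_abac.enabled flag; hypothetical stand-in for the project's helper:

def check_legacy_abac(cluster_details, cluster_id):
    """Return "enabled" if the cluster still has legacy ABAC turned on, otherwise None."""
    if cluster_details.legacy_abac.enabled:
        return "enabled"
    return None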
Example #9
def pubsub_trigger(data, context):
    """
    Used with Pub/Sub trigger method to evaluate the Pub/Sub topic permissions for public members.
    """

    # Integrates cloud logging handler to python logging
    create_logger()
    logging.info(
        'Received Pub/Sub topic update log from Pub/Sub. Checking for public members.'
    )

    # Determine if CFN is running in view-only mode
    try:
        mode = getenv('MODE')
    except:
        logging.error('Mode not found in environment variable.')

    # Determine alerting Pub/Sub topic
    try:
        topic_id = getenv('TOPIC_ID')
    except:
        logging.error('Topic ID not found in environment variable.')

    # Converting log to json
    data_buffer = base64.b64decode(data['data'])
    log_entry = json.loads(data_buffer)

    # Parse project id and resource name for allow/deny list checking and remediation
    resource_name = log_entry['protoPayload']['resourceName'].split('/')
    project_id = resource_name[1]
    topic_to_evaluate = resource_name[-1]

    # Check our project_id against the project list set at deployment
    if check_list(project_id):
        logging.info(
            f'The project {project_id} is not in the allowlist, is in the denylist, or a list is not fully configured. Continuing evaluation.'
        )

        # Create PubSub Client
        client = pubsub_v1.PublisherClient()
        topic_path = client.topic_path(project_id, topic_to_evaluate)

        # Get a copy of the current policy
        try:
            policy = client.get_iam_policy(request={"resource": topic_path})
        except:
            logging.error(
                f'Could not retrieve current policy from {topic_to_evaluate}')

        # Evaluate policy and return clean policy without public bindings
        all_users_found, new_policy = eval_iam_policy(client, policy)

        # If a public binding was found, we need to remediate.
        if all_users_found:
            finding_type = "public_topic_policy"
            # Set our pub/sub message
            message = f"Found public members on PubSub Topic: {topic_to_evaluate} in project: {project_id}."
            # Publish message to Pub/Sub
            logging.info('Publishing message to Pub/Sub.')
            try:
                publish_message(finding_type, mode, topic_to_evaluate,
                                project_id, message, topic_id)
                logging.info(f'Published message to {topic_id}')
            except:
                logging.error(f'Could not publish message to {topic_id}')
                raise
            if mode == "write":
                logging.info(
                    f"Lockdown is in write mode. Updating topic policy: {topic_to_evaluate} with new IAM policy."
                )
                # Updates topic with private IAM policy
                set_iam_policy(new_policy, client, topic_path, project_id)
            if mode == "read":
                logging.info(
                    'Lockdown is in read-only mode. Taking no action.')
        else:
            logging.info(
                f"Did not find public members on PubSub Topic: {topic_to_evaluate} in project: {project_id}."
            )

    else:
        logging.info(
            f'The project {project_id} is in the allowlist or is not in the denylist. No action being taken.'
        )
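
For the topic check above, eval_iam_policy returns both a flag and a cleaned policy. A minimal sketch, assuming policy is the protobuf Policy returned by get_iam_policy with repeated bindings/members fields; hypothetical stand-in for the project's helper:

def eval_iam_policy(client, policy):
    """Return (public_found, policy) with allUsers/allAuthenticatedUsers removed from every binding."""
    public_members = {"allUsers", "allAuthenticatedUsers"}
    public_found = False
    for binding in policy.bindings:
        kept = [m for m in binding.members if m not in public_members]
        if len(kept) != len(binding.members):
            public_found = True
            # Repeated protobuf fields cannot be reassigned directly; clear and extend instead.
            del binding.members[:]
            binding.members.extend(kept)
    return public_found, policy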