def write_policy(input_file, minimize, minimize_length, fmt, verbose):
    """
    Write least-privilege IAM policies, restricting all actions to resource ARNs.

    :param input_file: path to the policy template YAML (stdin when falsy)
    :param minimize: whether to minimize the policy
    :param minimize_length: minimum character length used when minimizing
    :param fmt: output format - "json", "yaml", or "terraform"
    :param verbose: log level name; enables stream logging when set
    """
    if verbose:
        set_stream_logger(level=getattr(logging, verbose.upper()))

    # Template comes from a file when given, otherwise from stdin.
    if input_file:
        cfg = read_yaml_file(input_file)
    else:
        try:
            cfg = yaml.safe_load(sys.stdin)
        except yaml.YAMLError as exc:
            logger.critical(exc)
            sys.exit()

    min_length = minimize_length if minimize else None
    policy = write_policy_with_template(cfg, min_length)

    if fmt == "yaml":
        rendered = yaml.dump(policy, sort_keys=False)
    else:
        rendered = json.dumps(policy, indent=4 if fmt == "json" else None)
        if fmt == "terraform":
            # Terraform external data sources expect a JSON object whose
            # "policy" value is the policy document as a string.
            rendered = json.dumps({'policy': rendered})
    print(rendered)
def test_rds_policy_read_only(self):
    """test_rds_policy_read_only: Make sure that the RDS Policies work properly"""
    policy_file_path = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            os.path.pardir,
            "examples",
            "yml",
            "crud-rds-read.yml",
        ))
    template = read_yaml_file(policy_file_path)
    # Expected: a single read-level statement scoped to the database ARN.
    expected_policy = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Sid": "RdsReadDb",
                "Effect": "Allow",
                "Action": [
                    "rds:DownloadDBLogFilePortion",
                    "rds:ListTagsForResource"
                ],
                "Resource": [
                    "arn:aws:rds:us-east-1:123456789012:db:mydatabase"
                ]
            }
        ]
    }
    rendered = write_policy_with_template(template)
    print(json.dumps(rendered, indent=4))
    self.assertDictEqual(expected_policy, rendered)
def write_policy(input_file, crud, minimize):
    """
    Write a least-privilege IAM Policy by supplying either a list of actions or
    access levels specific to resource ARNs!

    :param input_file: path to the template YAML (stdin when falsy)
    :param crud: True to interpret the template as access levels per resource
    :param minimize: whether to minimize the resulting policy
    :return: the rendered policy dict
    """
    db_session = connect_db(DATABASE_FILE_PATH)
    if input_file:
        cfg = read_yaml_file(input_file)
    else:
        try:
            cfg = yaml.safe_load(sys.stdin)
        except yaml.YAMLError as exc:
            print(exc)
            sys.exit()
    # CRUD mode consumes resource-specific access levels; the default mode
    # consumes a plain list of IAM actions.
    writer = write_policy_with_access_levels if crud else write_policy_with_actions
    policy = writer(db_session, cfg, minimize)
    print(json.dumps(policy, indent=4))
    return policy
def test_eks_gh_155(self):
    """test_eks_gh_155: Test EKS issue raised in GH-155"""
    template_file_path = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            "files",
            "eks-service-wide.yml",
        ))
    template = read_yaml_file(template_file_path)
    # EKS service-wide template should collapse into one wildcard statement.
    expected_results = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Sid": "MultMultNone",
                "Effect": "Allow",
                "Action": [
                    "eks:ListClusters",
                    "eks:CreateCluster"
                ],
                "Resource": ["*"]
            }
        ]
    }
    policy = write_policy_with_template(template)
    print(json.dumps(policy, indent=4))
    self.assertDictEqual(policy, expected_results)
def write_policy_dir(input_dir, output_dir, minimize, log_level):
    """
    write_policy, but this time with an input directory of YML/YAML files, and
    an output directory for all the JSON files

    :param input_dir: directory containing the *.yml policy templates
    :param output_dir: directory the rendered *.json policy files are written to
    :param minimize: whether to minimize the resulting policies
    :param log_level: log level name handed to set_log_level
    """
    set_log_level(logger, log_level)
    db_session = connect_db(DATABASE_FILE_PATH)
    input_dir = os.path.abspath(input_dir)
    output_dir = os.path.abspath(output_dir)
    if not minimize:
        logger.warning(
            "Note: --minimize option is not set. If the policy is too large, "
            "it can hit the AWS IAM Policy character limit. "
            "We'll execute as-is, but try using `--minimize 0` functionality "
            "for production to optimize policy size.\n")
    # Validate both directories before doing any work.
    if not check_valid_file_path(input_dir):
        logger.critical("Input directory is invalid")
        sys.exit()
    if not check_valid_file_path(output_dir):
        logger.critical("Output directory is invalid")
        sys.exit()
    input_files = glob.glob(str(input_dir + "/*.yml"), recursive=False)
    if not input_files:
        logger.critical(
            "Directory is empty or does not have files with *.yml extension. "
            "Please check the folder contents and/or extension spelling.")
        # BUG FIX: previously fell through after the critical message and still
        # logged "Writing..." and "Finished"; exit like the other fatal branches.
        sys.exit()
    logger.info("Writing the policy JSON files from %s to %s...\n", input_dir,
                output_dir)
    for yaml_file in input_files:
        # The policy name is the file name with its extension stripped.
        base_name = os.path.basename(yaml_file)
        # Reuse base_name instead of recomputing os.path.basename (was duplicated).
        base_name_no_extension = os.path.splitext(base_name)[0]
        cfg = read_yaml_file(yaml_file)
        policy = write_policy_with_template(db_session, cfg, minimize)
        logger.info("Writing policy for %s\n", base_name)
        # os.path.join instead of manual "/" concatenation for portability.
        target_file = os.path.join(output_dir, base_name_no_extension + ".json")
        if os.path.exists(target_file):
            logger.info(
                "Target file for %s.json exists in the target directory. "
                "Removing it and writing a new file.\n",
                target_file,
            )
            os.remove(target_file)
        write_json_file(target_file, policy)
    logger.info("Finished")
def get_action_access_level_overrides_from_yml(
        service, access_level_overrides_file_path=None):
    """
    Read the YML overrides file, which is formatted like:
    ['ec2']['permissions-management'][action_name].

    Since the AWS Documentation is sometimes outdated, we can use this YML file
    to override whatever they provide in their documentation.

    :param service: service prefix to look up, e.g. "ec2"
    :param access_level_overrides_file_path: optional override file; the bundled
        overrides file is used when not supplied
    :return: the service's override mapping, or False when the service is absent
    """
    if not access_level_overrides_file_path:
        access_level_overrides_file_path = BUNDLED_ACCESS_OVERRIDES_FILE
    cfg = read_yaml_file(access_level_overrides_file_path)
    # False (not None) is the documented "no overrides" sentinel for callers.
    return cfg[service] if service in cfg else False
def write_policy(input_file, minimize):
    """
    Write least-privilege IAM policies, restricting all actions to resource ARNs.

    :param input_file: path to the policy template YAML (stdin when falsy)
    :param minimize: whether to minimize the resulting policy
    """
    if input_file:
        cfg = read_yaml_file(input_file)
    else:
        # No file given: read the template document from stdin.
        try:
            cfg = yaml.safe_load(sys.stdin)
        except yaml.YAMLError as exc:
            logger.critical(exc)
            sys.exit()
    print(json.dumps(write_policy_with_template(cfg, minimize), indent=4))
def get_action_access_level_overrides_from_yml(
        service, access_level_overrides_file_path=None):
    """
    Read the YML overrides file, which is formatted like:
    ['ec2']['permissions-management'][action_name].

    Since the AWS Documentation is sometimes outdated, we can use this YML file
    to override whatever they provide in their documentation.

    :param service: service prefix to look up, e.g. "ec2"
    :param access_level_overrides_file_path: optional override file; defaults to
        the bundled shared/data/access-level-overrides.yml
    :return: the service's override mapping, or False when the service is absent
    """
    if not access_level_overrides_file_path:
        # BUG FIX: the default path was built by mixing os.path.join, Path and a
        # hard-coded "/" string concatenation, which is fragile and non-portable.
        # Build it with pathlib end-to-end instead.
        access_level_overrides_file_path = str(
            Path(__file__).resolve().parent.parent
            / 'shared' / 'data' / 'access-level-overrides.yml')
    cfg = read_yaml_file(access_level_overrides_file_path)
    if service in cfg:
        return cfg[service]
    # False (not None) is the documented "no overrides" sentinel for callers.
    return False
def test_add_wildcard_only_actions_matching_services_and_access_level(
        self):
    """test_add_wildcard_only_actions_matching_services_and_access_level: We'd never
    write a policy like this IRL, but it is a quality check on database handling."""
    policy_file_path = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            os.path.pardir,
            "examples",
            "yml",
            "crud-with-wildcard-service-level.yml",
        ))
    template = read_yaml_file(policy_file_path)
    # Wildcard-only actions land in MultMultNone; the ARN-scoped s3 actions get
    # their own permissions-management statement.
    expected = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Sid": "MultMultNone",
                "Effect": "Allow",
                "Action": [
                    "ram:EnableSharingWithAwsOrganization",
                    "ram:GetResourcePolicies",
                    "secretsmanager:CreateSecret",
                    "ecr:GetAuthorizationToken",
                    "s3:GetAccessPoint",
                    "s3:GetAccountPublicAccessBlock",
                    "s3:ListAccessPoints",
                    "s3:ListJobs"
                ],
                "Resource": ["*"]
            },
            {
                "Sid": "S3PermissionsmanagementBucket",
                "Effect": "Allow",
                "Action": [
                    "s3:DeleteBucketPolicy",
                    "s3:PutBucketAcl",
                    "s3:PutBucketPolicy",
                    "s3:PutBucketPublicAccessBlock"
                ],
                "Resource": ["arn:aws:s3:::example-org-s3-access-logs"]
            }
        ]
    }
    rendered = write_policy_with_template(template)
    print(json.dumps(rendered, indent=4))
    self.maxDiff = None
    self.assertDictEqual(rendered, expected)
def test_dynamodb_arn_policy_gh_215(self):
    """test_dynamodb_arn_matching_gh_215: Test writing a policy with DynamoDB"""
    template_file_path = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            "files",
            "dynamodb_gh_215.yml",
        ))
    template = read_yaml_file(template_file_path)
    policy = write_policy_with_template(template)
    print(json.dumps(policy, indent=4))
    # Only the statement IDs are pinned; the exact action lists may grow as the
    # IAM database is updated.
    allowed_sids = [
        "MultMultNone",
        "DynamodbReadTable",
        "DynamodbWriteTable",
    ]
    for statement in policy.get("Statement"):
        self.assertTrue(statement.get("Sid") in allowed_sids)
def write_policy(input_file, minimize, log_level):
    """
    Write a least-privilege IAM Policy by supplying either a list of actions or
    access levels specific to resource ARNs!

    :param input_file: path to the policy template YAML (stdin when falsy)
    :param minimize: whether to minimize the resulting policy
    :param log_level: log level name handed to set_log_level
    """
    set_log_level(logger, log_level)
    db_session = connect_db(DATABASE_FILE_PATH)
    if input_file:
        cfg = read_yaml_file(input_file)
    else:
        # No file given: read the template document from stdin.
        try:
            cfg = yaml.safe_load(sys.stdin)
        except yaml.YAMLError as exc:
            logger.critical(exc)
            sys.exit()
    policy = write_policy_with_template(db_session, cfg, minimize)
    print(json.dumps(policy, indent=4))
def test_add_wildcard_only_actions_matching_services_and_access_level(
        self):
    """test_add_wildcard_only_actions_matching_services_and_access_level: We'd never
    write a policy like this IRL, but it is a quality check on database handling."""
    policy_file_path = abspath(
        join(
            dirname(__file__),
            pardir + "/" + pardir +
            "/examples/yml/crud-with-wildcard-service-level.yml",
        ))
    template = read_yaml_file(policy_file_path)
    rendered = write_policy_with_template(db_session, template)
    print(json.dumps(rendered, indent=4))
    # Wildcard-only actions collapse into MultMultNone; the ARN-scoped s3
    # actions get their own permissions-management statement.
    expected = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Sid": "MultMultNone",
                "Effect": "Allow",
                "Action": [
                    "ecr:GetAuthorizationToken",
                    "s3:GetAccessPoint",
                    "s3:GetAccountPublicAccessBlock",
                    "s3:ListAccessPoints"
                ],
                "Resource": ["*"]
            },
            {
                "Sid": "S3PermissionsmanagementBucket",
                "Effect": "Allow",
                "Action": [
                    "s3:DeleteBucketPolicy",
                    "s3:PutBucketAcl",
                    "s3:PutBucketPolicy",
                    "s3:PutBucketPublicAccessBlock"
                ],
                "Resource": ["arn:aws:s3:::example-org-s3-access-logs"]
            }
        ]
    }
    self.assertDictEqual(rendered, expected)
def test_rds_policy_read_only(self):
    """test_rds_policy_read_only: Make sure that the RDS Policies work properly"""
    policy_file_path = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            os.path.pardir,
            "examples",
            "yml",
            "crud-rds-read.yml",
        ))
    cfg = read_yaml_file(policy_file_path)
    # FIX: removed the large `desired_output` dict — it was a dead local that no
    # assertion ever used. Only the Sid and a few known actions are pinned, so
    # the test keeps passing as the IAM database gains new RDS read actions.
    expected_actions = [
        "rds:DownloadCompleteDBLogFile",
        "rds:DownloadDBLogFilePortion",
        "rds:ListTagsForResource"
    ]
    output = write_policy_with_template(cfg)
    print(json.dumps(output, indent=4))
    expected_statement_ids = ["RdsReadDb"]
    for statement in output.get("Statement"):
        self.assertTrue(statement.get("Sid") in expected_statement_ids)
    for action in expected_actions:
        self.assertTrue(action in output["Statement"][0]["Action"])
def write_policy(input_file, minimize, fmt):
    """
    Write least-privilege IAM policies, restricting all actions to resource ARNs.

    :param input_file: path to the policy template YAML (stdin when falsy)
    :param minimize: whether to minimize the resulting policy
    :param fmt: output format - "json", "yaml", or "terraform"
    """
    if input_file:
        cfg = read_yaml_file(input_file)
    else:
        try:
            cfg = yaml.safe_load(sys.stdin)
        except yaml.YAMLError as exc:
            logger.critical(exc)
            sys.exit()
    policy = write_policy_with_template(cfg, minimize)
    if fmt == "yaml":
        rendered = yaml.dump(policy, sort_keys=False)
    else:
        rendered = json.dumps(policy, indent=4 if fmt == "json" else None)
        if fmt == "terraform":
            # Terraform external data sources expect a JSON object whose
            # "policy" value is the policy document as a string.
            rendered = json.dumps({'policy': rendered})
    print(rendered)
def load_report_config_file(filename):
    """Read the Report config file and return the rendered dict"""
    # Thin wrapper: the YAML reader already produces the finished dict.
    return read_yaml_file(filename)
def test_dynamodb_arn_policy_gh_215(self):
    """test_dynamodb_arn_matching_gh_215: Test writing a policy with DynamoDB"""
    template_file_path = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            "files",
            "dynamodb_gh_215.yml",
        ))
    template = read_yaml_file(template_file_path)
    policy = write_policy_with_template(template)
    print(json.dumps(policy, indent=4))
    # The full expected document: service-wide actions go to MultMultNone,
    # table-scoped read and write actions each get their own statement.
    expected = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Sid": "MultMultNone",
                "Effect": "Allow",
                "Action": [
                    "dynamodb:DescribeLimits",
                    "dynamodb:DescribeReservedCapacity",
                    "dynamodb:DescribeReservedCapacityOfferings",
                    "dynamodb:ListStreams",
                    "dynamodb:ListBackups",
                    "dynamodb:ListContributorInsights",
                    "dynamodb:ListGlobalTables",
                    "dynamodb:ListTables"
                ],
                "Resource": ["*"]
            },
            {
                "Sid": "DynamodbReadTable",
                "Effect": "Allow",
                "Action": [
                    "dynamodb:BatchGetItem",
                    "dynamodb:ConditionCheckItem",
                    "dynamodb:DescribeContinuousBackups",
                    "dynamodb:DescribeContributorInsights",
                    "dynamodb:DescribeTable",
                    "dynamodb:DescribeTableReplicaAutoScaling",
                    "dynamodb:DescribeTimeToLive",
                    "dynamodb:GetItem",
                    "dynamodb:ListTagsOfResource",
                    "dynamodb:Query",
                    "dynamodb:Scan"
                ],
                "Resource": [
                    "arn:aws:dynamodb:us-east-1:123456789123:table/mytable"
                ]
            },
            {
                "Sid": "DynamodbWriteTable",
                "Effect": "Allow",
                "Action": [
                    "dynamodb:BatchWriteItem",
                    "dynamodb:CreateBackup",
                    "dynamodb:CreateGlobalTable",
                    "dynamodb:CreateTable",
                    "dynamodb:CreateTableReplica",
                    "dynamodb:DeleteItem",
                    "dynamodb:DeleteTable",
                    "dynamodb:DeleteTableReplica",
                    "dynamodb:PutItem",
                    "dynamodb:RestoreTableFromBackup",
                    "dynamodb:RestoreTableToPointInTime",
                    "dynamodb:UpdateContinuousBackups",
                    "dynamodb:UpdateContributorInsights",
                    "dynamodb:UpdateGlobalTable",
                    "dynamodb:UpdateGlobalTableSettings",
                    "dynamodb:UpdateItem",
                    "dynamodb:UpdateTable",
                    "dynamodb:UpdateTableReplicaAutoScaling",
                    "dynamodb:UpdateTimeToLive"
                ],
                "Resource": [
                    "arn:aws:dynamodb:us-east-1:123456789123:table/mytable"
                ]
            }
        ]
    }
    self.assertDictEqual(policy, expected)
def test_rds_policy_read_write_list(self):
    """test_rds_policy_read_write_list: Make sure that the RDS Policies work properly for multiple levels"""
    policy_file_path = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            os.path.pardir,
            "examples",
            "yml",
            "crud-rds-mult.yml",
        ))
    cfg = read_yaml_file(policy_file_path)
    # FIX: removed the large `desired_output` dict — it was a dead local that no
    # assertion ever used. Only the statement IDs are pinned, so the test keeps
    # passing as AWS adds new RDS actions to each access level.
    expected_statement_ids = [
        "RdsReadDb",
        "MultMultNone",
        "RdsWriteDb",
        "RdsListDb"
    ]
    policy = write_policy_with_template(cfg)
    for statement in policy.get("Statement"):
        self.assertTrue(statement.get("Sid") in expected_statement_ids)
def test_add_wildcard_only_actions_matching_services_and_access_level(self):
    """test_add_wildcard_only_actions_matching_services_and_access_level: We'd never
    write a policy like this IRL, but it is a quality check on database handling."""
    policy_file_path = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            os.path.pardir,
            "examples",
            "yml",
            "crud-with-wildcard-service-level.yml",
        ))
    template = read_yaml_file(policy_file_path)
    policy = write_policy_with_template(template)
    # Roughly, the rendered policy should contain two statements:
    #   MultMultNone                   -> wildcard-only actions (ram:*, ecr:*, s3 account-level)
    #   S3PermissionsmanagementBucket  -> s3 permissions-management actions scoped
    #                                     to arn:aws:s3:::example-org-s3-access-logs
    # The exact action lists are NOT asserted so the test survives database updates.
    print(json.dumps(policy, indent=4))
    self.maxDiff = None
    # Future-proof checks, in three layers:
    # (1) the statement IDs are present
    sid_names = get_sid_names_from_policy(policy)
    self.assertIn("MultMultNone", sid_names,
                  "Sid is not in the list of expected Statement Ids")
    self.assertIn("S3PermissionsmanagementBucket", sid_names,
                  "Sid is not in the list of expected Statement Ids")
    # (2) a few actions we know must be there are present
    wildcard_stmt = get_statement_from_policy_using_sid(policy, "MultMultNone")
    self.assertIn("ram:EnableSharingWithAwsOrganization", wildcard_stmt.get("Action"))
    self.assertIn("s3:GetAccountPublicAccessBlock", wildcard_stmt.get("Action"))
    bucket_stmt = get_statement_from_policy_using_sid(
        policy, "S3PermissionsmanagementBucket")
    self.assertIn("s3:DeleteBucketPolicy", bucket_stmt.get("Action"))
    self.assertIn("s3:PutBucketPolicy", bucket_stmt.get("Action"))
    # (3) the action lists are at least as long as today's, since they only grow
    self.assertTrue(len(wildcard_stmt.get("Action")) > 5)  # size 6 at time of writing
    self.assertTrue(len(bucket_stmt.get("Action")) > 3)  # size 4 at time of writing
def write_policy_dir(input_dir, output_dir, crud, minimize):
    """
    write_policy, but this time with an input directory of YML/YAML files, and
    an output directory for all the JSON files

    :param input_dir: directory containing the *.yml policy templates
    :param output_dir: directory the rendered *.json policy files are written to
    :param crud: True to use resource-specific access levels; False for action lists
    :param minimize: whether to minimize the resulting policies
    """
    db_session = connect_db(DATABASE_FILE_PATH)
    input_dir = os.path.abspath(input_dir)
    output_dir = os.path.abspath(output_dir)
    if not crud:
        print(
            "Warning: If you are using ARNs from Terraform to generate your policies, "
            "try using the CRUD functionality instead of the default actions-based policy writing functionality."
        )
    if not minimize:
        print(
            "Warning: --minimize option is not set. If the policy is too large, "
            "it can hit the AWS IAM Policy character limit. "
            "We'll execute as-is, but try using `--minimize 0` functionality "
            "for production to optimize policy size.\n")
    # Validate both directories before doing any work.
    if not check_valid_file_path(input_dir):
        print("Input directory is invalid")
        sys.exit()
    if not check_valid_file_path(output_dir):
        print("Output directory is invalid")
        sys.exit()
    input_files = glob.glob(str(input_dir + '/*.yml'), recursive=False)
    if not input_files:
        print(
            "Directory is empty or does not have files with *.yml extension. "
            "Please check the folder contents and/or extension spelling.")
        # BUG FIX: previously fell through after this message and still printed
        # the "Writing..." banner and "Finished"; exit like the invalid-path
        # branches above.
        sys.exit()
    print("Writing the policy JSON files from " + input_dir + " to " +
          output_dir + "...\n")
    for yaml_file in input_files:
        # The policy name is the file name with its extension stripped.
        base_name = os.path.basename(yaml_file)
        # Reuse base_name instead of recomputing os.path.basename (was duplicated).
        base_name_no_extension = os.path.splitext(base_name)[0]
        cfg = read_yaml_file(yaml_file)
        # CRUD mode consumes resource-specific access levels; the default mode
        # consumes a plain list of IAM actions.
        if crud:
            policy = write_policy_with_access_levels(db_session, cfg, minimize)
        else:
            policy = write_policy_with_actions(db_session, cfg, minimize)
        print("Writing policy for " + base_name + '\n')
        # os.path.join instead of manual "/" concatenation for portability.
        target_file = os.path.join(output_dir, base_name_no_extension + '.json')
        if os.path.exists(target_file):
            print(
                "Target file for " + base_name_no_extension + '.json' +
                " exists in the target directory. Removing it and writing a new file.\n"
            )
            os.remove(target_file)
        write_json_file(target_file, policy)
    print("Finished")