def get_trail_event_selectors(trace_id, check_history_id, organization_id, project_id, awsaccount, region_name, trail_client, cloud_trail):
    """Return the event selectors of one CloudTrail trail.

    Uses the raw resource file already cached on S3 when it exists; on a
    cache miss the selectors are fetched from the CloudTrail API and the
    result is written back to S3 as the resource information file.

    Raises a PmError (after logging) when the S3 read or write fails.
    """
    logger = common_utils.begin_logger(trace_id, __name__,
                                       inspect.currentframe())
    cache_key = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudTrail_EventSelectors_" + region_name + "_" +
        cloud_trail["Name"] + ".json")
    event_selectors = []
    # Reuse the resource information file on S3 when it is already there.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        cache_key)) is True:
        try:
            event_selectors = FileUtils.read_json(trace_id,
                                                  "S3_CHECK_BUCKET",
                                                  cache_key)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, logger)
    else:
        event_selectors = CloudTrailUtils.get_event_selectors(
            trace_id, awsaccount, trail_client, region_name,
            cloud_trail["TrailARN"])
        # Persist the freshly fetched selectors for later runs.
        try:
            FileUtils.upload_s3(trace_id, event_selectors, cache_key, True)
        except PmError as e:
            logger.error("[%s/%s] CloudTrailのイベントセレクタ情報のS3保存に失敗しました。",
                         awsaccount, region_name)
            raise common_utils.write_log_pm_error(e, logger)
    return event_selectors
def check_asc_item_copy_cis_check(trace_id, check_history_id, organization_id,
                                  project_id, aws_account, session,
                                  result_json_path, check_cis_item):
    """Copy the stored result of an already-executed CIS check item into this
    ASC item's own result file on S3.

    Returns the copied CheckResult value, or CheckResult.Error when the
    source result row or its JSON file cannot be retrieved or saved.
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    # Human-readable label of the referenced CIS item, used in log messages.
    error_cis = LIST_ERROR_CIS[check_cis_item]
    try:
        try:
            check_result_items = pm_checkResultItems.query_by_check_history_id_and_check_item_code_and_aws_account(
                trace_id,
                check_history_id,
                check_cis_item,
                aws_account,
                is_cw_logger=True)
        except Exception as e:
            cw_logger.error("[%s] %sのチェック結果を取得できませんでした。", aws_account,
                            error_cis)
            return CheckResult.Error
        # When the source result is "managed-1" (MembersManagement) no result
        # file was produced for the CIS item, so the file copy is skipped and
        # the status is returned as-is.
        # NOTE(review): an empty query result makes the [0] below raise
        # IndexError, which is absorbed by the outer generic handler.
        check_result = check_result_items[0]["CheckResult"]
        if check_result == CheckResult.MembersManagement:
            return check_result
        s3_file_name = CommonConst.PATH_CHECK_RESULT.format(
            check_history_id, organization_id, project_id, aws_account,
            check_cis_item + ".json")
        try:
            check_results_s3 = FileUtils.read_json(trace_id,
                                                   CommonConst.S3_CHECK_BUCKET,
                                                   s3_file_name,
                                                   is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s] %sのチェック結果ファイルを取得できませんでした。",
                            aws_account, error_cis)
            return CheckResult.Error
    except Exception as e:
        # Catch-all for unexpected failures in the retrieval phase.
        cw_logger.error("[%s] チェック処理中にエラーが発生しました。", aws_account)
        return CheckResult.Error
    # Export the copied result JSON under this check item's own result path.
    try:
        FileUtils.upload_s3(trace_id,
                            check_results_s3,
                            result_json_path,
                            format_json=True,
                            is_cw_logger=True)
    except Exception as e:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Propagate the CIS item's stored verdict as this item's verdict.
    return check_result
def test_upload_s3_case_not_has_param_format_json(self):
    """upload_s3 called without format_json must delegate to upload_csv only."""
    with patch('premembers.common.FileUtils.upload_json'
               ) as upload_json_mock, \
            patch('premembers.common.FileUtils.upload_csv'
                  ) as upload_csv_mock:
        # Exercise the target without the optional format_json argument.
        FileUtils.upload_s3(trace_id, data_file, s3_file_name)
        # The JSON writer must never be touched on this path.
        upload_json_mock.assert_not_called()
        # The CSV writer receives the default check bucket plus the
        # caller's original arguments.
        upload_csv_mock.assert_called_once_with(trace_id, "S3_CHECK_BUCKET",
                                                data_file, s3_file_name)
def check_ibp_item_07_08(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """IBP item 7-8: verify that the account password policy allows users
    to change their own password.

    A detection is recorded when the policy lacks the
    AllowUsersToChangePassword flag, has it disabled, or when no password
    policy exists at all (NoSuchEntity). Returns a CheckResult value.
    """
    logger = common_utils.begin_logger(trace_id, __name__,
                                       inspect.currentframe())
    detections = []
    try:
        policy = ibp_item_common_logic.get_account_password_policy(
            trace_id, check_history_id, organization_id, project_id,
            aws_account, session, result_json_path)
        flag_missing = common_utils.check_key("AllowUsersToChangePassword",
                                              policy) is False
        if flag_missing or policy['AllowUsersToChangePassword'] is False:
            detections.append(get_check_ibp_item_07_08_result())
    except PmError as err:
        # A missing password policy counts as a detection, anything else
        # aborts the check.
        if err.cause_error.response['Error'][
                'Code'] == CommonConst.NO_SUCH_ENTITY:
            detections.append(get_check_ibp_item_07_08_result())
        else:
            logger.error("[%s] チェック処理中にエラーが発生しました。", aws_account)
            return CheckResult.Error
    # Export File CHECK_IBP_ITEM_07_08.json
    try:
        result_file = {
            'AWSAccount': aws_account,
            'CheckResults': detections,
            'DateTime': date_utils.get_current_date_by_format(
                date_utils.PATTERN_YYYYMMDDHHMMSS),
        }
        FileUtils.upload_s3(trace_id, result_file, result_json_path, True)
    except Exception:
        logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Any detection means a critical defect.
    return CheckResult.CriticalDefect if detections else CheckResult.Normal
def check_asc_item_13_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """ASC item 13-01: detect S3 buckets without default server-side
    encryption.

    Saves each bucket's encryption rules to S3 as a resource information
    file and records a detection when encryption is missing. Returns a
    CheckResult value.
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    check_results = []
    is_authorized = True
    s3_client = S3Utils.get_s3_client(trace_id, session, aws_account,
                                      is_cw_logger=True)
    try:
        list_buckets = asc_item_common_logic.get_list_buckets(
            trace_id, check_history_id, organization_id, project_id,
            s3_client, aws_account)
    except PmError as e:
        return CheckResult.Error
    # Save each bucket's encryption configuration to S3 (resource file).
    for bucket in list_buckets['Buckets']:
        bucket_name = bucket['Name']
        region_name = None
        try:
            region_name = S3Utils.get_bucket_location(
                trace_id, s3_client, bucket_name, aws_account)
            if region_name is None:
                # A null location constraint means us-east-1.
                region_name = CommonConst.US_EAST_REGION
            bucket_encryption_rules = S3Utils.get_bucket_encryption(
                trace_id, s3_client, bucket_name, aws_account, region_name,
                is_cw_logger=True)
        except PmError as e:
            if e.cause_error.response['Error'][
                    'Code'] in CommonConst.SERVER_SIDE_ENCRYPTION_CONFIGURATION_NOT_FOUND_ERROR:
                # No encryption configuration at all -> detection.
                check_results.append(get_check_asc_item_13_01_result(
                    region_name, bucket_name))
                continue
            elif e.cause_error.response['Error'][
                    'Code'] in CommonConst.S3_SKIP_EXCEPTION:
                # BUG FIX: the original lines ended with stray commas,
                # turning operation name and error code into 1-tuples in
                # the recorded result.
                error_operation = e.cause_error.operation_name
                error_code = e.cause_error.response['Error']['Code']
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation,
                        error_code, error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error
        if len(bucket_encryption_rules) == 0:
            continue
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_Encryption_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  bucket_encryption_rules, s3_file_name,
                                  is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s] S3バケット暗号化情報のS3保存に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
        # Check rule: a rule without SSEAlgorithm means no default
        # encryption for the bucket.
        try:
            for bucket_encryption_rule in bucket_encryption_rules:
                if (common_utils.check_key(
                        "SSEAlgorithm",
                        bucket_encryption_rule['ApplyServerSideEncryptionByDefault']
                ) is False):
                    check_results.append(get_check_asc_item_13_01_result(
                        region_name, bucket_name))
                    break
        except Exception as e:
            cw_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
    # Export File json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_asc_item_13_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_asc_item_13_01, result_json_path,
                            format_json=True, is_cw_logger=True)
    except Exception as e:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Verdict: permission errors dominate, then any detection.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def check_asc_item_12_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """ASC item 12-01: detect S3 buckets that are publicly accessible via
    ACL (AllUsers / AuthenticatedUsers grants) or via a bucket policy that
    allows everyone.

    Saves the bucket list, each bucket's ACL and bucket policy to S3 as
    resource information files. Returns a CheckResult value.
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    check_results = []
    is_authorized = True
    # Build the S3 client from the assumed-role credentials.
    try:
        s3_client = S3Utils.get_s3_client(trace_id, session, aws_account,
                                          is_cw_logger=True)
    except PmError as e:
        raise common_utils.write_log_pm_error(e, cw_logger)
    # Fetch the bucket list.
    try:
        list_buckets = S3Utils.list_buckets(trace_id, s3_client, aws_account,
                                            is_cw_logger=True)
    except PmError as e:
        return CheckResult.Error
    # Save the bucket list to S3.
    try:
        s3_file_name = CommonConst.PATH_CHECK_RAW.format(
            check_history_id, organization_id, project_id, aws_account,
            "ASC/S3_ListBuckets.json")
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", list_buckets,
                              s3_file_name, is_cw_logger=True)
    except PmError as e:
        cw_logger.error("[%s] S3バケット一覧情報のS3保存に失敗しました。", aws_account)
        return CheckResult.Error
    for bucket in list_buckets["Buckets"]:
        bucket_name = bucket['Name']
        region_name = None
        try:
            # Resolve each bucket's region.
            region_name = S3Utils.get_bucket_location(trace_id,
                                                      s3_client,
                                                      bucket_name,
                                                      aws_account,
                                                      is_cw_logger=True)
            if region_name is None:
                # A null location constraint means us-east-1.
                region_name = CommonConst.US_EAST_REGION
            # Fetch the bucket's ACL.
            bucket_acl = S3Utils.get_bucket_acl(trace_id, s3_client,
                                                bucket_name, aws_account,
                                                region_name)
            # Fetch the bucket policy; buckets without one are skipped.
            bucket_policy = S3Utils.get_bucket_policy(trace_id,
                                                      s3_client,
                                                      bucket_name,
                                                      aws_account,
                                                      region_name,
                                                      is_cw_logger=True)
            if bucket_policy is None:
                continue
        except PmError as e:
            if e.cause_error.response['Error'][
                    'Code'] in CommonConst.S3_SKIP_EXCEPTION:
                # BUG FIX: the original lines ended with stray commas,
                # turning operation name and error code into 1-tuples in
                # the recorded result.
                error_operation = e.cause_error.operation_name
                error_code = e.cause_error.response['Error']['Code']
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation,
                        error_code, error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error
        # Save the ACL to S3 (resource information file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_ACL_" + region_name + "_" + bucket_name + ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_acl,
                                  s3_file_name, is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s] S3バケットACL情報のS3保存に失敗しました。(%s)/(%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
        # Save the bucket policy to S3 (resource information file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_Bucketpolicy_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_policy,
                                  s3_file_name, is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s] S3バケットポリシー情報のS3保存に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
        # Check processing
        bucket_acl_abnormity = False
        bucket_policy_abnormity = False
        try:
            # Check-1 / Check-2: ACL grants operation rights to AllUsers or
            # AuthenticatedUsers.
            for grant in bucket_acl["Grants"]:
                if (common_utils.check_key("URI", grant['Grantee'])):
                    if grant['Grantee']["URI"] in ACL_URI:
                        bucket_acl_abnormity = True
                        break
            # Check-3: bucket policy grants Allow to every principal.
            # NOTE(review): ast.literal_eval cannot parse JSON literals such
            # as true/false/null; json.loads would be the safer parser here
            # — left unchanged to preserve behavior.
            bucket_policy = ast.literal_eval(bucket_policy['Policy'])
            for statement in bucket_policy["Statement"]:
                if (statement["Effect"] == CommonConst.ALLOW
                        and statement["Principal"] == CommonConst.ALL):
                    bucket_policy_abnormity = True
                    break
            if bucket_acl_abnormity is True or bucket_policy_abnormity is True:
                check_results.append(
                    get_check_accessible_result(region_name,
                                                bucket_acl_abnormity,
                                                bucket_policy_abnormity,
                                                bucket_name))
        except Exception as e:
            cw_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
    # Export File CHECK_ASC_ITEM_12_01.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_bucket = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_bucket, result_json_path,
                            format_json=True, is_cw_logger=True)
    except Exception as e:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Verdict: permission errors dominate, then any detection.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def check_asc_item_07_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """ASC item 7-01: detect unencrypted EBS volumes across all regions.

    Saves each region's raw volume descriptions to S3 as resource
    information files and writes the aggregated detections to
    result_json_path. Returns a CheckResult value.
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    check_results = []
    try:
        regions = aws_common.get_regions(trace_id, session, is_cw_logger=True)
    except PmError as e:
        cw_logger.error("Regionの情報の取得に失敗しました。")
        raise common_utils.write_log_pm_error(e, cw_logger)
    for region in regions:
        region_name = region["RegionName"]
        # Regions on the ignore list (e.g. GovCloud/China) are skipped.
        if region_name in REGION_IGNORE:
            continue
        ec2_client = Ec2Utils.get_ec2_client(trace_id, session, region_name,
                                             aws_account, is_cw_logger=True)
        try:
            # Fetch the region's EBS volume descriptions.
            ebs_volumes = Ec2Utils.describe_volumes(trace_id, aws_account,
                                                    ec2_client, region_name,
                                                    is_cw_logger=True)
        except PmError as e:
            return CheckResult.Error
        # Nothing to check in this region; log and move on.
        if (len(ebs_volumes) == 0):
            cw_logger.info("[%s/%s] EBSボリューム情報の取得件数が0でした。", aws_account,
                           region_name)
            continue
        try:
            # Save the raw EBS data to S3 (resource information file).
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/EBS_Volumes_" + region_name + ".json")
            FileUtils.upload_s3(trace_id, ebs_volumes, s3_file_name, True,
                                is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s/%s] EBSボリューム情報情報のS3保存に失敗しました。",
                            aws_account, region_name)
            return CheckResult.Error
        # Check rule: flag every volume whose Encrypted flag is off.
        try:
            for ebs_volume in ebs_volumes:
                if (ebs_volume['Encrypted'] is False):
                    check_result = get_check_asc_item_07_01_result(
                        ebs_volume, region_name)
                    check_results.append(check_result)
        except PmError as e:
            cw_logger.error("[%s/%s] チェック処理中にエラーが発生しました。", aws_account,
                            region_name)
            return CheckResult.Error
    # Export File ASC/CHECK_ ASC_ITEM_07_01.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_acs_item_7_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_acs_item_7_01, result_json_path,
                            format_json=True, is_cw_logger=True)
    except Exception as e:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Verdict: any detection means a critical defect.
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def check_cis_metric(trace_id, check_history_id, organization_id, project_id,
                     awsaccount, session, result_json_path, check_item_code,
                     level_code, check_result):
    """CIS section 3 metric checks: verify that CloudTrail is wired to a
    CloudWatch Logs metric filter, an alarm, and an SNS subscription for
    the pattern identified by check_item_code.

    A region passes when every stage (Check-0 .. Check-5, see inline
    comments) succeeds; when one multi-region trail satisfies everything
    (Check-5) all prior regional detections are discarded. Returns
    `check_result` when detections remain, otherwise CheckResult.Normal.
    Raises PmError with an attached PmNotificationError on failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    # Set once a multi-region, logging-enabled, all-management-event trail
    # has been seen anywhere.
    is_all_region_enable = False
    is_check5_successful = False
    # Get all regions.
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Regionの情報の取得に失敗しました。")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        try:
            region_name = region["RegionName"]
            if region_name in REGION_IGNORE:
                continue
            # Default: the region is in error until a full chain is found.
            is_region_error = True
            try:
                cloud_trails = cis_item_common_logic.get_cloud_trails(
                    trace_id, check_history_id, organization_id, project_id,
                    awsaccount, region_name, session)
            except PmError as e:
                raise common_utils.write_log_pm_error(e, pm_logger)
            logs_client = LogsUtils.get_logs_client(trace_id, session,
                                                    region_name, awsaccount)
            cloudwatch_client = CloudWatchlUtils.get_cloudwatch_client(
                trace_id, session, region_name, awsaccount)
            sns_client = SNSUtils.get_sns_client(trace_id, session,
                                                 region_name, awsaccount)
            trail_client = CloudTrailUtils.get_trail_client(
                trace_id, session, region_name, awsaccount)
            for cloud_trail in cloud_trails:
                try:
                    # Check-0. Does a multi-region CloudTrail exist?
                    trail_status = cis_item_common_logic.get_trails_status(
                        trace_id, check_history_id, organization_id,
                        project_id, awsaccount, region_name, trail_client,
                        cloud_trail)
                    if (len(trail_status) == 0):
                        continue
                    event_selectors = cis_item_common_logic.get_trail_event_selectors(
                        trace_id, check_history_id, organization_id,
                        project_id, awsaccount, region_name, trail_client,
                        cloud_trail)
                    # Is there a trail effective for all regions with
                    # logging enabled?
                    if (is_all_region_enable is False
                            and cloud_trail['IsMultiRegionTrail'] is True
                            and trail_status['IsLogging'] is True):
                        # Does it record writes for all management events?
                        for event_selector in event_selectors:
                            if (event_selector['IncludeManagementEvents'] is True
                                    and event_selector['ReadWriteType'] ==
                                    CommonConst.EVENT_SELECTOR_READ_WRITE_TYPE_ALL):
                                is_all_region_enable = True
                                break
                    if (is_all_region_enable is False):
                        continue
                    # Check-1. Is a metric configured for the trail's
                    # CloudWatch Logs log group?
                    if common_utils.check_key(
                            'CloudWatchLogsLogGroupArn', cloud_trail
                    ) is False or not cloud_trail['CloudWatchLogsLogGroupArn']:
                        continue
                    # Check-2. Is a metric filter configured for the pattern
                    # under test (e.g. unauthorized API calls)?
                    metric_filters = describe_metric_filters(
                        trace_id, check_history_id, organization_id,
                        project_id, awsaccount, region_name, logs_client,
                        cloud_trail['CloudWatchLogsLogGroupArn'].split(":")[6])
                    list_filter_pattern_result = get_list_filter_pattern_match_result(
                        trace_id, check_item_code, metric_filters)
                    if (not list_filter_pattern_result):
                        continue
                    # Check-3. Is an alarm configured for the metric?
                    metric_alarms = describe_alarms(
                        trace_id, check_history_id, organization_id,
                        project_id, awsaccount, region_name,
                        cloudwatch_client)
                    metric_names = []
                    for filter_pattern_result in list_filter_pattern_result:
                        metric_names.extend(
                            jmespath.search(
                                'metricTransformations[*].metricName',
                                filter_pattern_result))
                    metric_alarms_matching = []
                    for metric_alarm in metric_alarms:
                        pm_logger.info(
                            "アラーム名=%s",
                            common_utils.get_value("AlarmName", metric_alarm))
                        if (common_utils.check_key('MetricName',
                                                   metric_alarm) is True
                                and metric_alarm['MetricName'] in metric_names):
                            metric_alarms_matching.append(metric_alarm)
                    if not metric_alarms_matching:
                        continue
                    # Check-4. Does the SNS topic kicked by the alarm have
                    # at least one subscription?
                    for metric_alarm in metric_alarms_matching:
                        for alarm_action in metric_alarm['AlarmActions']:
                            if alarm_action.startswith(CommonConst.ARN_SNS):
                                subscriptions = list_subscriptions_by_topic(
                                    trace_id, check_history_id,
                                    organization_id, project_id, awsaccount,
                                    region_name, sns_client, alarm_action)
                                if len(subscriptions) > 0:
                                    is_region_error = False
                                    break
                    # Check-5. Is the configured CloudTrail a trail that is
                    # effective for all regions?
                    if (is_region_error is False
                            and is_all_region_enable is True):
                        is_check5_successful = True
                        break
                except Exception as e:
                    pm_logger.error("[%s/%s] チェック処理中にエラーが発生しました。",
                                    awsaccount, region_name)
                    raise common_utils.write_log_exception(e, pm_logger)
            # One fully wired multi-region trail clears every detection.
            if is_check5_successful is True:
                check_results = []
                break
            if (is_region_error is True):
                result = {'Region': region_name, 'Level': level_code}
                check_results.append(result)
        except Exception as e:
            # Attach/complete notification metadata before re-raising.
            pm_error = common_utils.write_log_exception(e, pm_logger)
            if not pm_error.pm_notification_error:
                pm_error.pm_notification_error = PmNotificationError(
                    check_item_code=check_item_code,
                    region=region_name,
                    code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            else:
                pm_error.pm_notification_error.check_item_code = check_item_code
                pm_error.pm_notification_error.aws_account = awsaccount
                pm_error.pm_notification_error.region = region_name
            raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Save the detections into a single check-result JSON file.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_cis_item_3 = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_cis_item_3, result_json_path,
                            True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    if len(check_results) > 0:
        return check_result
    return CheckResult.Normal
def check_asc_item_16_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """ASC item 16-01: detect S3 buckets that neither grant LogDelivery in
    their ACL nor have server access logging enabled.

    Saves each bucket's logging configuration to S3 as a resource
    information file. Returns a CheckResult value.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    is_authorized = True
    # Build the S3 client from the assumed-role credentials.
    try:
        s3_client = S3Utils.get_s3_client(trace_id, session, aws_account)
    except PmError as e:
        raise common_utils.write_log_pm_error(e, pm_logger)
    # Fetch the bucket list.
    try:
        list_buckets = asc_item_common_logic.get_list_buckets(
            trace_id, check_history_id, organization_id, project_id,
            s3_client, aws_account)
    except PmError as e:
        return CheckResult.Error
    for bucket in list_buckets["Buckets"]:
        bucket_name = bucket['Name']
        region_name = None
        try:
            # Resolve each bucket's region.
            region_name = S3Utils.get_bucket_location(trace_id, s3_client,
                                                      bucket_name,
                                                      aws_account)
            if region_name is None:
                # A null location constraint means us-east-1.
                region_name = CommonConst.US_EAST_REGION
            # Fetch the bucket's ACL.
            bucket_acl = get_bucket_acl(
                trace_id, check_history_id, organization_id, project_id,
                aws_account, region_name, bucket_name, s3_client)
            # Fetch the bucket's server access logging configuration.
            bucket_logging = S3Utils.get_bucket_logging(
                trace_id, aws_account, s3_client, bucket_name, region_name)
        except PmError as e:
            if e.cause_error.response['Error'][
                    'Code'] in CommonConst.S3_SKIP_EXCEPTION:
                # BUG FIX: the original lines ended with stray commas,
                # turning operation name and error code into 1-tuples in
                # the recorded result.
                error_operation = e.cause_error.operation_name
                error_code = e.cause_error.response['Error']['Code']
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation,
                        error_code, error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error
        # Save the logging configuration to S3 (resource information file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_ClientLogging_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  bucket_logging, s3_file_name)
        except PmError as e:
            pm_logger.error("[%s] S3バケットロギング情報の取得に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
        # Check processing
        bucket_abnormity = True
        try:
            # Check-1. Does the ACL grant operation rights to LogDelivery?
            for grant in bucket_acl["Grants"]:
                if (common_utils.check_key("URI", grant['Grantee'])
                        and grant['Grantee']["URI"] == LOG_DELIVERY_URI):
                    bucket_abnormity = False
                    break
            # Check-2. Is server access logging disabled for the bucket?
            if bucket_abnormity is True and len(bucket_logging) == 0:
                result = {
                    'Region': region_name,
                    'Level': CommonConst.LEVEL_CODE_21,
                    'DetectionItem': {
                        'BucketName': bucket_name
                    }
                }
                check_results.append(result)
        except Exception as e:
            pm_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
    # Export File CHECK_ASC_ITEM_16_01.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_asc_item_16_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_asc_item_16_01, result_json_path,
                            True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Verdict: permission errors dominate, then any detection.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def execute_security_group_port(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, session,
                                result_json_path, port, check_item_code,
                                excluded_resources):
    """Detect security groups that open `port` (TCP or all-protocol) to
    0.0.0.0/0 in every region.

    Regional security-group data is cached on S3 and reused when present.
    Returns CheckResult.CriticalDefect when detections exist, otherwise
    CheckResult.Normal; raises PmError with notification metadata on
    failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    # Export File VPC_SecurityGroups_{region}.json
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Regionの情報の取得に失敗しました。")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            # Reuse the cached resource file when present, otherwise call
            # the EC2 API.
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            if (len(security_groups) == 0):
                pm_logger.info("[%s/%s] セキュリティグループ情報の取得件数が0でした。",
                               awsaccount, region_name)
                continue
            try:
                FileUtils.upload_s3(trace_id, security_groups, s3_file_name,
                                    True)
            except PmError as e:
                pm_logger.error("[%s/%s] セキュリティグループ情報のS3保存に失敗しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
            try:
                for security_group in security_groups:
                    # Skip resources excluded by configuration.
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name,
                            ResourceType.GroupId, resource_name,
                            excluded_resources):
                        continue
                    for ip_permission in security_group['IpPermissions']:
                        # Protocol '-1' (all traffic) always matches; other
                        # protocols must be TCP with a FromPort-ToPort range
                        # covering `port`.
                        if ip_permission['IpProtocol'] != '-1':
                            if ip_permission['IpProtocol'] != CommonConst.TCP:
                                continue
                            if common_utils.check_key(
                                    'FromPort', ip_permission
                            ) is False or ip_permission['FromPort'] > port:
                                continue
                            if common_utils.check_key(
                                    'ToPort', ip_permission
                            ) is False or ip_permission['ToPort'] < port:
                                continue
                        # Detection: the rule is open to the world.
                        for ip_range in ip_permission['IpRanges']:
                            if common_utils.check_key('CidrIp', ip_range):
                                if (CommonConst.CIDR_IP_NOT_SECURITY ==
                                        ip_range['CidrIp']):
                                    check_result = get_check_result(
                                        security_group, ip_permission,
                                        ip_range, region_name)
                                    check_results.append(check_result)
                                    break
            except Exception as e:
                pm_logger.error("[%s/%s] チェック処理中にエラーが発生しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)
    result_security_group = CheckResult.Normal
    if (len(check_results) > 0):
        result_security_group = CheckResult.CriticalDefect
    # Save the detections into a single check-result JSON file.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_rule_security_group = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_rule_security_group,
                            result_json_path, True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    return result_security_group
def check_cis_item_4_03(trace_id, check_history_id, organization_id,
                        project_id, awsaccount, session, result_json_path,
                        check_item_code, excluded_resources):
    """CIS item 4-03: per region, detect (1) 'default' security groups that
    still hold inbound and outbound rules and (2) EC2 instances attached to
    a 'default' security group.

    Security-group and instance data are cached on S3 and reused when
    present. Returns CheckResult.MinorInadequacies when detections exist,
    otherwise CheckResult.Normal; raises PmError with notification
    metadata on failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Regionの情報の取得に失敗しました。")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            # Fetch security-group data per region (GovCloud/China
            # excluded), reusing the cached resource file when present.
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            if (len(security_groups) == 0):
                pm_logger.info("[%s/%s] セキュリティグループ情報の取得件数が0でした。",
                               awsaccount, region_name)
            try:
                if (len(security_groups) > 0):
                    FileUtils.upload_s3(trace_id, security_groups,
                                        s3_file_name, True)
            except PmError as e:
                pm_logger.error("[%s/%s] セキュリティグループ情報のS3保存に失敗しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
            # Fetch EC2 instance data per region (GovCloud/China excluded),
            # reusing the cached resource file when present.
            s3_file_name_iam_instances = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "IAM_Instances_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(
                    trace_id, "S3_CHECK_BUCKET",
                    s3_file_name_iam_instances)) is True:
                try:
                    reservation_instances = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET",
                        s3_file_name_iam_instances)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    reservation_instances = Ec2Utils.describe_instances(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            if (len(reservation_instances) == 0):
                pm_logger.info("[%s/%s] EC2インスタンス情報の取得件数が0でした。",
                               awsaccount, region_name)
            try:
                if (len(reservation_instances) > 0):
                    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                        check_history_id, organization_id, project_id,
                        awsaccount,
                        "VPC_SG_Instances_" + region_name + ".json")
                    FileUtils.upload_s3(trace_id, reservation_instances,
                                        s3_file_name, True)
            except PmError as e:
                pm_logger.error("[%s/%s] EC2インスタンス情報のS3保存に失敗しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
            check1 = []
            check2 = []
            try:
                # From the security-group resource data, detect 'default'
                # groups that have both inbound and outbound rules set.
                for security_group in security_groups:
                    # Skip resources excluded by configuration.
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name,
                            ResourceType.GroupId, resource_name,
                            excluded_resources):
                        continue
                    if (security_group['GroupName'] == CommonConst.DEFAULT
                            and len(security_group['IpPermissions']) > 0
                            and len(security_group['IpPermissionsEgress']) > 0):
                        check1.append(security_group['GroupId'])
                # From the EC2 instance resource data, detect instances
                # with the 'default' security group attached.
                for reservation_instance in reservation_instances:
                    for instance in reservation_instance['Instances']:
                        for security_group in instance['SecurityGroups']:
                            if security_group[
                                    'GroupName'] == CommonConst.DEFAULT:
                                # Attach the Name tag (if any) for display.
                                if common_utils.check_key('Tags',
                                                          instance) is True:
                                    name_tag = next(
                                        filter(
                                            lambda tag: tag['Key'] == 'Name',
                                            instance['Tags']), None)
                                    instance[
                                        'InstanceName'] = None if name_tag is None else name_tag[
                                            'Value']
                                check2.append(instance)
                if (len(check1) > 0 or len(check2) > 0):
                    check_results.append(
                        get_check_cis_item_4_03_result(check1, check2,
                                                       region_name))
            except Exception as e:
                pm_logger.error("[%s/%s] チェック処理中にエラーが発生しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Export File CHECK_CIS12_ITEM_4_03.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_cis_item_4_03 = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_cis_item_4_03, result_json_path,
                            True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Verdict
    if len(check_results) > 0:
        return CheckResult.MinorInadequacies
    return CheckResult.Normal