Exemple #1
0
    def test_get_bucket_logging_error_method_not_allowed(self):
        """A MethodNotAllowed ClientError is wrapped in PmError and logged."""
        # Build the ClientError that the mocked client will raise.
        error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
        operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
        error_response['Error']['Code'] = 'MethodNotAllowed'
        client_error = ClientError(error_response=error_response,
                                   operation_name=operation_name)
        with patch.object(client_s3, 'get_bucket_logging',
                          side_effect=client_error), \
                patch.object(PmLogAdapter, 'warning',
                             return_value=None) as mock_method_warning, \
                self.assertRaises(PmError) as exception:
            # call the function under test
            S3Utils.get_bucket_logging(trace_id, aws_account, client_s3,
                                       bucket_name, region_name)

        # The PmError must carry the original ClientError details.
        cause = exception.exception.cause_error
        self.assertEqual(error_response['Error'], cause.response['Error'])
        self.assertEqual(operation_name, cause.operation_name)

        # A warning must have been logged for the permission error.
        mock_method_warning.assert_any_call(
            '[%s/%s] 権限エラーによりS3バケットロギング情報の取得に失敗しました。(%s)', aws_account,
            region_name, bucket_name)
    def test_get_bucket_encryption_error_other(self):
        """A generic ClientError is re-raised as PmError and logged as error."""
        # Build the ClientError that the mocked client will raise.
        error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
        operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
        client_error = ClientError(error_response=error_response,
                                   operation_name=operation_name)
        with patch.object(client_s3, 'get_bucket_encryption',
                          side_effect=client_error), \
                patch.object(PmLogAdapter, 'error',
                             return_value=None) as mock_method_error, \
                self.assertRaises(PmError) as exception:
            # call the function under test
            S3Utils.get_bucket_encryption(trace_id, client_s3, bucket_name,
                                          aws_account, region_name)

        # The PmError must carry the original ClientError details.
        cause = exception.exception.cause_error
        self.assertEqual(error_response['Error'], cause.response['Error'])
        self.assertEqual(operation_name, cause.operation_name)

        # An error log entry must have been written.
        mock_method_error.assert_any_call("[%s]S3バケット暗号化情報の取得に失敗しました。(%s/%s)",
                                          aws_account, region_name,
                                          bucket_name)
    def test_list_buckets_case_error(self):
        """An AccessDenied ClientError from list_buckets becomes a PmError."""
        # Build the ClientError that the mocked client will raise.
        error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
        operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
        error_response['Error']['Code'] = 'AccessDenied'
        client_error = ClientError(error_response=error_response,
                                   operation_name=operation_name)
        with patch.object(client_s3, 'list_buckets',
                          side_effect=client_error), \
                patch.object(PmLogAdapter, 'error',
                             return_value=None) as mock_method_error, \
                self.assertRaises(PmError) as exception:
            # call the function under test
            S3Utils.list_buckets(trace_id, client_s3, aws_account)

        # The PmError must carry the original ClientError details.
        cause = exception.exception.cause_error
        self.assertEqual(error_response['Error'], cause.response['Error'])
        self.assertEqual(operation_name, cause.operation_name)

        # An error log entry must have been written.
        mock_method_error.assert_any_call('[%s] S3バケット一覧情報の取得に失敗しました。',
                                          aws_account)
    def test_get_bucket_encryption_error_server_side_encryption_configuration_not_found_error(self):
        """A missing encryption config raises PmError and logs at info level."""
        # Build the ClientError that the mocked client will raise.
        error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
        operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
        error_response['Error'][
            'Code'] = 'ServerSideEncryptionConfigurationNotFoundError'
        client_error = ClientError(error_response=error_response,
                                   operation_name=operation_name)
        with patch.object(client_s3, 'get_bucket_encryption',
                          side_effect=client_error), \
                patch.object(PmLogAdapter, 'info',
                             return_value=None) as mock_method_info, \
                self.assertRaises(PmError) as exception:
            # call the function under test
            S3Utils.get_bucket_encryption(trace_id, client_s3, bucket_name,
                                          aws_account, region_name)

        # The PmError must carry the original ClientError details.
        cause = exception.exception.cause_error
        self.assertEqual(error_response['Error'], cause.response['Error'])
        self.assertEqual(operation_name, cause.operation_name)

        # An info log records that no encryption info exists.
        mock_method_info.assert_any_call('[%s]S3バケット暗号化情報がありません。(%s/%s)',
                                         aws_account, region_name, bucket_name)
def get_bucket_acl(trace_id, check_history_id, organization_id, project_id,
                   aws_account, region_name, bucket_name, s3_client):
    """Return the ACL of *bucket_name*, preferring the copy cached on S3.

    When a previously saved raw file exists it is read back; otherwise
    the ACL is fetched from AWS and then uploaded to S3 as the new cache.
    A failed cache upload is only logged (best effort) and does not abort.

    Raises:
        PmError: when reading the cache or fetching the ACL fails.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "ASC/S3_ACL_" + region_name + "_" + bucket_name + ".json")

    # Prefer the previously saved resource file when it exists.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            return FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                       s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)

    # No cache: fetch the ACL directly from AWS.
    try:
        bucket_acl = S3Utils.get_bucket_acl(
            trace_id, s3_client, bucket_name, aws_account, region_name)
    except PmError as e:
        raise common_utils.write_log_pm_error(e, pm_logger)

    # Cache the fetched ACL on S3; a failure here is logged but tolerated.
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_acl,
                              s3_file_name)
    except PmError:
        pm_logger.error("[%s] S3バケットACL情報のS3保存に失敗しました。(%s)/(%s)",
                        aws_account, region_name, bucket_name)
    return bucket_acl
Exemple #6
0
def get_list_buckets(trace_id, check_history_id, organization_id, project_id,
                     s3_client, aws_account):
    """Return the account's bucket listing, preferring the S3-cached copy.

    When a previously saved raw file exists it is read back; otherwise
    the listing is fetched from AWS, uploaded to S3 as the new cache,
    and returned.

    Raises:
        PmError: when reading the cache, listing buckets, or saving the
            cache fails.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())

    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "ASC/S3_ListBuckets.json")

    # Prefer the previously saved resource file when it exists.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            return FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                       s3_file_name)
        except PmError as e:
            raise common_utils.write_log_exception(e, pm_logger)

    # No cache: fetch the bucket list directly from AWS.
    try:
        list_buckets = S3Utils.list_buckets(trace_id, s3_client, aws_account)
    except PmError as e:
        raise common_utils.write_log_exception(e, pm_logger)

    # Save the listing to S3; unlike the ACL helper, a failed save here
    # is fatal and re-raised after logging.
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", list_buckets,
                              s3_file_name)
    except PmError as e:
        pm_logger.error("[%s] S3バケット一覧情報のS3保存に失敗しました。", aws_account)
        raise common_utils.write_log_exception(e, pm_logger)
    return list_buckets
    def test_get_bucket_location_success_response_not_exists_location_constraint(
            self):
        """A response without LocationConstraint yields None."""
        location = copy.deepcopy(DataTestS3.LOCATION_BUCKET)

        # Arrange: remove every pre-existing bucket.
        for bucket in client_s3.list_buckets()['Buckets']:
            client_s3.delete_bucket(Bucket=bucket['Name'])

        # Arrange: create a bucket pinned to a specific region.
        client_s3.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': location})

        # Act: make get_bucket_location return an empty response.
        with patch.object(client_s3, 'get_bucket_location',
                          return_value={}):
            actual_bucket_location = S3Utils.get_bucket_location(
                trace_id, client_s3, bucket_name, aws_account)

        # Assert: a missing LocationConstraint key maps to None.
        self.assertEqual(None, actual_bucket_location)
    def test_get_bucket_encryption_success_response_not_exists_server_side_encryption_configuration_and_rules(self):
        """An empty encryption response yields an empty rule list."""
        # Arrange: make get_bucket_encryption return an empty response.
        with patch.object(client_s3, 'get_bucket_encryption',
                          return_value={}):
            # call the function under test
            actual_bucket_encryption = S3Utils.get_bucket_encryption(
                trace_id, client_s3, bucket_name, aws_account, region_name)

        # Assert: no configuration present -> empty list.
        self.assertEqual([], actual_bucket_encryption)
    def test_get_bucket_encryption_success_response_exists_server_side_encryption_configuration_and_rules(self):
        """The Rules list is extracted from a full encryption response."""
        bucket_encryption = copy.deepcopy(DataTestS3.BUCKET_ENCRYPTION)

        # Arrange: return the canned encryption payload from the client.
        with patch.object(client_s3, 'get_bucket_encryption',
                          return_value=bucket_encryption):
            # call the function under test
            actual_bucket_encryption = S3Utils.get_bucket_encryption(
                trace_id, client_s3, bucket_name, aws_account, region_name)

        # Assert: only the Rules section is returned.
        expected_rules = (
            bucket_encryption['ServerSideEncryptionConfiguration']['Rules'])
        self.assertEqual(expected_rules, actual_bucket_encryption)
Exemple #10
0
    def test_get_bucket_logging_success_response_exists_logging_enabled(self):
        """The LoggingEnabled section is returned when logging is set up."""
        bucket_logging = copy.deepcopy(DataTestS3.BUCKET_LOGGING)

        # Arrange: create the bucket and enable logging on it.
        client_s3.create_bucket(**info_bucket)
        client_s3.put_bucket_logging(**bucket_logging)

        # Act: call the function under test.
        actual_bucket_logging = S3Utils.get_bucket_logging(
            trace_id, aws_account, client_s3, bucket_name, region_name)

        # Assert: the stored LoggingEnabled section round-trips.
        self.assertEqual(bucket_name, actual_bucket_logging['TargetBucket'])
        self.assertEqual(
            bucket_logging['BucketLoggingStatus']['LoggingEnabled'],
            actual_bucket_logging)
Exemple #11
0
    def test_get_bucket_policy_success(self):
        """The stored bucket policy document is returned unchanged."""
        policy_document = copy.deepcopy(DataTestS3.BUCKET_POLICY)['Policy']

        # Arrange: create the bucket and attach the policy.
        client_s3.create_bucket(**info_bucket)
        client_s3.put_bucket_policy(Bucket=bucket_name,
                                    Policy=policy_document)

        # Act: call the function under test.
        actual_bucket_policy = S3Utils.get_bucket_policy(
            trace_id, client_s3, bucket_name, aws_account, region_name)

        # Assert: the returned document matches what was stored.
        self.assertEqual(policy_document, actual_bucket_policy['Policy'])
Exemple #12
0
    def test_get_bucket_logging_success_response_not_exists_logging_enabled(
            self):
        """A logging response without LoggingEnabled yields an empty list."""
        bucket_logging = copy.deepcopy(DataTestS3.BUCKET_LOGGING)

        # Arrange: create the bucket and enable logging on it.
        client_s3.create_bucket(**info_bucket)
        client_s3.put_bucket_logging(**bucket_logging)

        # Act: simulate a response lacking the LoggingEnabled section.
        with patch.object(client_s3, 'get_bucket_logging',
                          return_value={}):
            actual_bucket_logging = S3Utils.get_bucket_logging(
                trace_id, aws_account, client_s3, bucket_name, region_name)

        # Assert: missing LoggingEnabled -> empty list.
        self.assertEqual([], actual_bucket_logging)
    def test_get_bucket_location_success_response_exists_location_constraint(
            self):
        """The bucket's LocationConstraint is returned when present."""
        expected_location = copy.deepcopy(DataTestS3.LOCATION_BUCKET)

        # Arrange: create a bucket pinned to a specific region.
        client_s3.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={
                'LocationConstraint': expected_location})

        # Act: call the function under test.
        actual_bucket_location = S3Utils.get_bucket_location(
            trace_id, client_s3, bucket_name, aws_account)

        # Assert
        self.assertEqual(expected_location, actual_bucket_location)
Exemple #14
0
    def test_get_bucket_acl_success(self):
        """Owner and Grants of the stored ACL round-trip unchanged."""
        access_control_policy = copy.deepcopy(
            DataTestS3.ACCESS_CONTROL_POLICY)

        # Arrange: create the bucket and attach the ACL.
        client_s3.create_bucket(**info_bucket)
        client_s3.put_bucket_acl(Bucket=bucket_name,
                                 AccessControlPolicy=access_control_policy)

        # Act: call the function under test.
        actual_bucket_acl = S3Utils.get_bucket_acl(trace_id, client_s3,
                                                   bucket_name, aws_account,
                                                   region_name)

        # Assert: both ACL sections match what was stored.
        self.assertEqual(access_control_policy['Owner'],
                         actual_bucket_acl['Owner'])
        self.assertEqual(access_control_policy['Grants'],
                         actual_bucket_acl['Grants'])
    def test_list_buckets_success(self):
        """list_buckets returns the single bucket created for the test."""
        expected_bucket_name = copy.deepcopy(DataTestS3.INFO_BUCKET['Bucket'])

        # Arrange: remove every pre-existing bucket.
        for bucket in client_s3.list_buckets()['Buckets']:
            client_s3.delete_bucket(Bucket=bucket['Name'])

        # Arrange: create the one bucket we expect to see listed.
        s3_utils.create_bucket(expected_bucket_name)

        # Act: call the function under test.
        result_list_buckets = S3Utils.list_buckets(trace_id, client_s3,
                                                   aws_account)

        # Assert: the created bucket is the first (and only) entry.
        self.assertEqual(expected_bucket_name,
                         result_list_buckets['Buckets'][0]['Name'])
Exemple #16
0
    def test_get_bucket_policy_error_no_such_bucket_policy(self):
        """NoSuchBucketPolicy is treated as 'no policy': returns None."""
        # Build the ClientError that the mocked client will raise.
        error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
        operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
        error_response['Error']['Code'] = 'NoSuchBucketPolicy'
        client_error = ClientError(error_response=error_response,
                                   operation_name=operation_name)
        with patch.object(client_s3, 'get_bucket_policy',
                          side_effect=client_error), \
                patch.object(PmLogAdapter, 'info',
                             return_value=None) as mock_method_info:
            # call the function under test
            result_bucket_policy = S3Utils.get_bucket_policy(
                trace_id, client_s3, bucket_name, aws_account, region_name)

        self.assertEqual(result_bucket_policy, None)

        # An info log records that no bucket policy is configured.
        mock_method_info.assert_any_call('[%s/%s]S3バケットポリシーは未設定です。(%s)',
                                         aws_account, region_name, bucket_name)
def check_asc_item_13_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """ASC item 13-01: detect S3 buckets without default encryption.

    For every bucket in the account, fetch its server-side encryption
    rules, persist them to S3 as a resource file, and record a finding
    when no SSEAlgorithm is configured.  Findings are exported as JSON.

    Returns:
        CheckResult.Normal when nothing was flagged,
        CheckResult.CriticalDefect when at least one bucket is flagged,
        CheckResult.Error on any failure (including permission errors).
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    check_results = []
    is_authorized = True
    s3_client = S3Utils.get_s3_client(trace_id, session, aws_account,
                                      is_cw_logger=True)
    try:
        list_buckets = asc_item_common_logic.get_list_buckets(
            trace_id, check_history_id, organization_id, project_id, s3_client,
            aws_account)
    except PmError:
        return CheckResult.Error

    # Inspect each bucket's encryption configuration.
    for bucket in list_buckets['Buckets']:
        bucket_name = bucket['Name']
        region_name = None
        try:
            region_name = S3Utils.get_bucket_location(
                trace_id, s3_client, bucket_name, aws_account)
            if region_name is None:
                # Buckets in us-east-1 report no LocationConstraint.
                region_name = CommonConst.US_EAST_REGION
            bucket_encryption_rules = S3Utils.get_bucket_encryption(
                trace_id, s3_client, bucket_name, aws_account,
                region_name, is_cw_logger=True)
        except PmError as e:
            error_code = e.cause_error.response['Error']['Code']
            if error_code in CommonConst.SERVER_SIDE_ENCRYPTION_CONFIGURATION_NOT_FOUND_ERROR:
                # No encryption configuration at all -> record a finding.
                check_results.append(get_check_asc_item_13_01_result(
                    region_name, bucket_name))
                continue
            elif error_code in CommonConst.S3_SKIP_EXCEPTION:
                # BUG FIX: these assignments previously ended with stray
                # trailing commas, turning the operation name and code
                # into 1-tuples instead of plain strings.
                error_operation = e.cause_error.operation_name
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation,
                        error_code, error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error

        # Nothing to persist or evaluate when no rules were returned.
        if len(bucket_encryption_rules) == 0:
            continue

        # Persist the encryption rules to S3 (resource file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_Encryption_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  bucket_encryption_rules, s3_file_name,
                                  is_cw_logger=True)
        except PmError:
            cw_logger.error("[%s] S3バケット暗号化情報のS3保存に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error

        # Flag the bucket when any rule lacks an SSEAlgorithm entry.
        try:
            for bucket_encryption_rule in bucket_encryption_rules:
                if (common_utils.check_key(
                        "SSEAlgorithm",
                        bucket_encryption_rule['ApplyServerSideEncryptionByDefault']
                ) is False):
                    check_results.append(get_check_asc_item_13_01_result(
                        region_name, bucket_name))
                    break
        except Exception:
            cw_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)", aws_account,
                            region_name, bucket_name)
            return CheckResult.Error

    # Export the aggregated findings as a JSON file.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_asc_item_13_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_asc_item_13_01, result_json_path,
                            format_json=True, is_cw_logger=True)
    except Exception:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error

    # Final verdict.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def check_asc_item_16_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """ASC item 16-01: detect S3 buckets without access logging.

    A bucket is flagged when its ACL grants the LogDelivery group no
    permission AND no logging configuration is present.  Findings are
    exported as CHECK_ASC_ITEM_16_01.json.

    Returns:
        CheckResult.Normal / CheckResult.CriticalDefect / CheckResult.Error
    Raises:
        PmError: when the S3 client cannot be created.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    is_authorized = True

    # Create an S3 client from the assumed-role credentials.
    try:
        s3_client = S3Utils.get_s3_client(trace_id, session, aws_account)
    except PmError as e:
        raise common_utils.write_log_pm_error(e, pm_logger)

    # Obtain the bucket listing (cached on S3 when available).
    try:
        list_buckets = asc_item_common_logic.get_list_buckets(
            trace_id, check_history_id, organization_id, project_id, s3_client,
            aws_account)
    except PmError:
        return CheckResult.Error

    for bucket in list_buckets["Buckets"]:
        bucket_name = bucket['Name']
        region_name = None
        try:
            # Resolve the bucket's region.
            region_name = S3Utils.get_bucket_location(trace_id, s3_client,
                                                      bucket_name, aws_account)
            if region_name is None:
                # Buckets in us-east-1 report no LocationConstraint.
                region_name = CommonConst.US_EAST_REGION

            # Fetch the bucket's ACL (cached on S3 when available).
            bucket_acl = get_bucket_acl(
                trace_id, check_history_id, organization_id, project_id,
                aws_account, region_name, bucket_name, s3_client)

            # Fetch the bucket's logging configuration.
            bucket_logging = S3Utils.get_bucket_logging(
                trace_id, aws_account, s3_client, bucket_name, region_name)
        except PmError as e:
            error_code = e.cause_error.response['Error']['Code']
            if error_code in CommonConst.S3_SKIP_EXCEPTION:
                # BUG FIX: these assignments previously ended with stray
                # trailing commas, turning the operation name and code
                # into 1-tuples instead of plain strings.
                error_operation = e.cause_error.operation_name
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation, error_code,
                        error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error

        # Persist the logging configuration to S3 (resource file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_ClientLogging_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_logging,
                                  s3_file_name)
        except PmError:
            # BUG FIX: the message previously said 取得に失敗 (fetch failed)
            # although this branch handles a failed S3 save; reworded to
            # match the sibling save-failure messages.
            pm_logger.error("[%s] S3バケットロギング情報のS3保存に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error

        # Evaluate the bucket.
        bucket_abnormity = True
        try:
            # Check-1: does the ACL grant the LogDelivery group any
            # permission on this bucket?
            for grant in bucket_acl["Grants"]:
                if (common_utils.check_key("URI", grant['Grantee']) and
                        grant['Grantee']["URI"] == LOG_DELIVERY_URI):
                    bucket_abnormity = False
                    break

            # Check-2: flag the bucket when logging is not enabled either.
            if bucket_abnormity is True and len(bucket_logging) == 0:
                check_results.append({
                    'Region': region_name,
                    'Level': CommonConst.LEVEL_CODE_21,
                    'DetectionItem': {
                        'BucketName': bucket_name
                    }
                })
        except Exception:
            pm_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)", aws_account,
                            region_name, bucket_name)
            return CheckResult.Error

    # Export CHECK_ASC_ITEM_16_01.json.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_asc_item_16_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_asc_item_16_01, result_json_path,
                            True)
    except Exception:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error

    # Final verdict.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
Exemple #19
0
def check_asc_item_12_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """ASC item 12-01: detect publicly accessible S3 buckets.

    A bucket is flagged when its ACL grants AllUsers/AuthenticatedUsers
    a permission, or when its bucket policy allows all principals.
    ACL and policy snapshots are saved to S3 and findings exported as
    CHECK_ASC_ITEM_12_01.json.

    Returns:
        CheckResult.Normal / CheckResult.CriticalDefect / CheckResult.Error
    Raises:
        PmError: when the S3 client cannot be created.
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    check_results = []
    is_authorized = True

    # Create an S3 client from the assumed-role credentials.
    try:
        s3_client = S3Utils.get_s3_client(trace_id,
                                          session,
                                          aws_account,
                                          is_cw_logger=True)
    except PmError as e:
        raise common_utils.write_log_pm_error(e, cw_logger)

    # Fetch the bucket listing.
    try:
        list_buckets = S3Utils.list_buckets(trace_id,
                                            s3_client,
                                            aws_account,
                                            is_cw_logger=True)
    except PmError:
        return CheckResult.Error

    # Persist the bucket listing to S3.
    try:
        s3_file_name = CommonConst.PATH_CHECK_RAW.format(
            check_history_id, organization_id, project_id, aws_account,
            "ASC/S3_ListBuckets.json")
        FileUtils.upload_json(trace_id,
                              "S3_CHECK_BUCKET",
                              list_buckets,
                              s3_file_name,
                              is_cw_logger=True)
    except PmError:
        cw_logger.error("[%s] S3バケット一覧情報のS3保存に失敗しました。", aws_account)
        return CheckResult.Error

    for bucket in list_buckets["Buckets"]:
        bucket_name = bucket['Name']
        region_name = None
        try:
            # Resolve the bucket's region.
            region_name = S3Utils.get_bucket_location(trace_id,
                                                      s3_client,
                                                      bucket_name,
                                                      aws_account,
                                                      is_cw_logger=True)
            if region_name is None:
                # Buckets in us-east-1 report no LocationConstraint.
                region_name = CommonConst.US_EAST_REGION

            # Fetch the bucket's ACL.
            bucket_acl = S3Utils.get_bucket_acl(trace_id, s3_client,
                                                bucket_name, aws_account,
                                                region_name)

            # Fetch the bucket's policy; a bucket without one is skipped.
            bucket_policy = S3Utils.get_bucket_policy(trace_id,
                                                      s3_client,
                                                      bucket_name,
                                                      aws_account,
                                                      region_name,
                                                      is_cw_logger=True)
            if bucket_policy is None:
                continue
        except PmError as e:
            error_code = e.cause_error.response['Error']['Code']
            if error_code in CommonConst.S3_SKIP_EXCEPTION:
                # BUG FIX: these assignments previously ended with stray
                # trailing commas, turning the operation name and code
                # into 1-tuples instead of plain strings.
                error_operation = e.cause_error.operation_name
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation, error_code,
                        error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error

        # Persist the ACL snapshot to S3.
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_ACL_" + region_name + "_" + bucket_name + ".json")
            FileUtils.upload_json(trace_id,
                                  "S3_CHECK_BUCKET",
                                  bucket_acl,
                                  s3_file_name,
                                  is_cw_logger=True)
        except PmError:
            cw_logger.error("[%s] S3バケットACL情報のS3保存に失敗しました。(%s)/(%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error

        # Persist the bucket-policy snapshot to S3.
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_Bucketpolicy_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id,
                                  "S3_CHECK_BUCKET",
                                  bucket_policy,
                                  s3_file_name,
                                  is_cw_logger=True)
        except PmError:
            cw_logger.error("[%s]  S3バケットポリシー情報のS3保存に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error

        # Evaluate the bucket.
        bucket_acl_abnormity = False
        bucket_policy_abnormity = False
        try:
            # Check-1 / Check-2: does the ACL grant AllUsers or
            # AuthenticatedUsers any permission?
            for grant in bucket_acl["Grants"]:
                if (common_utils.check_key("URI", grant['Grantee'])):
                    if grant['Grantee']["URI"] in ACL_URI:
                        bucket_acl_abnormity = True
                        break

            # Check-3: does the policy allow every principal ("*")?
            # NOTE(review): the policy document is JSON text; ast.literal_eval
            # fails on JSON literals such as true/false/null — json.loads
            # looks more appropriate. Left unchanged pending confirmation.
            bucket_policy = ast.literal_eval(bucket_policy['Policy'])
            for statement in bucket_policy["Statement"]:
                if (statement["Effect"] == CommonConst.ALLOW
                        and statement["Principal"] == CommonConst.ALL):
                    bucket_policy_abnormity = True
                    break

            if bucket_acl_abnormity is True or bucket_policy_abnormity is True:
                check_results.append(
                    get_check_accessible_result(region_name,
                                                bucket_acl_abnormity,
                                                bucket_policy_abnormity,
                                                bucket_name))
        except Exception:
            cw_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)", aws_account,
                            region_name, bucket_name)
            return CheckResult.Error

    # Export CHECK_ASC_ITEM_12_01.json.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_bucket = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id,
                            check_bucket,
                            result_json_path,
                            format_json=True,
                            is_cw_logger=True)
    except Exception:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Final verdict.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal