def get_bucket_acl(trace_id, check_history_id, organization_id, project_id,
                   aws_account, region_name, bucket_name, s3_client):
    """Return the ACL of an S3 bucket, preferring the cached raw file on S3.

    If the check-raw file already exists on S3 it is read back; otherwise
    the ACL is fetched from the S3 API and persisted as the raw file.

    Raises:
        PmError: when reading the cached file, fetching the ACL, or saving
            the fetched ACL fails.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "ASC/S3_ACL_" + region_name + "_" + bucket_name + ".json")
    # Get resource information (reuse the cached raw file when present).
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            bucket_acl = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                             s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            bucket_acl = S3Utils.get_bucket_acl(trace_id, s3_client,
                                                bucket_name, aws_account,
                                                region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Save the fetched ACL to S3 (access control list raw file).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_acl,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error("[%s] S3バケットACL情報のS3保存に失敗しました。(%s)/(%s)",
                            aws_account, region_name, bucket_name)
            # FIX: propagate the save failure like the sibling helpers
            # (get_list_buckets / get_account_password_policy) do, instead
            # of silently returning data that was never persisted.
            raise common_utils.write_log_pm_error(e, pm_logger)
    return bucket_acl
def get_list_buckets(trace_id, check_history_id, organization_id, project_id,
                     s3_client, aws_account):
    """Return the account's S3 bucket list, cached as a raw file on S3.

    Reads the raw file back when it already exists; otherwise calls the
    S3 API and stores the result before returning it.

    Raises:
        PmError: on read, fetch, or save failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "ASC/S3_ListBuckets.json")
    # Get resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            list_buckets = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                               s3_file_name)
        except PmError as e:
            raise common_utils.write_log_exception(e, pm_logger)
    else:
        # Fetch the bucket list from the S3 API.
        try:
            list_buckets = S3Utils.list_buckets(trace_id, s3_client,
                                                aws_account)
        except PmError as e:
            raise common_utils.write_log_exception(e, pm_logger)
        # Persist the bucket list to S3.
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", list_buckets,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error("[%s] S3バケット一覧情報のS3保存に失敗しました。", aws_account)
            raise common_utils.write_log_exception(e, pm_logger)
    return list_buckets
def get_account_password_policy(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, session,
                                result_json_path):
    """Return the IAM account password policy, cached as a raw file on S3.

    Reads the cached raw file when available; otherwise fetches the policy
    via IAM and saves it back to S3.

    Raises:
        PmError: on read, fetch, or save failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "IBP/IAM_AccountPasswordPolicy.json")
    # Get resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            account_password_policy = FileUtils.read_json(
                trace_id, "S3_CHECK_BUCKET", s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            account_password_policy = IAMUtils.get_account_password_policy(
                trace_id, session, awsaccount)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Persist the password policy to S3.
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  account_password_policy, s3_file_name)
        except PmError as e:
            pm_logger.error("[%s] アカウントパスワードポリシー情報のS3保存に失敗しました。",
                            awsaccount)
            raise common_utils.write_log_pm_error(e, pm_logger)
    return account_password_policy
def test_upload_json_success(self):
    """upload_json serializes the payload, uploads it, and logs success."""
    # Connect to the (mocked) S3 resource.
    resource_s3 = s3_utils.resource_connect()
    check_bucket = common_utils.get_environ("S3_CHECK_BUCKET")
    # Prepare data.
    resource_s3.create_bucket(Bucket=check_bucket)
    more_binary_data = json.dumps(data_file, indent=4,
                                  default=FileUtils.convert_handler,
                                  ensure_ascii=False)
    with patch.object(PmLogAdapter, 'info',
                      return_value=None) as mock_method_info:
        with patch.object(boto3, 'client') as mock_method_client:
            mock_method_client.return_value = client_s3
            with patch.object(client_s3, 'put_object') as mock_method:
                # Call the function under test.
                FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", data_file,
                                      s3_file_name)
                # Verify the success log message.
                mock_method_info.assert_any_call(
                    "Upload file json [%s] success on bucket [%s]",
                    s3_file_name, check_bucket)
                # Verify the client connection target.
                mock_method_client.assert_called_with('s3')
                # Verify the put_object call parameters.
                mock_method.assert_any_call(Body=more_binary_data,
                                            Bucket=check_bucket,
                                            Key=s3_file_name)
def check_ibp_item_02_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """Check IBP item 02-01: at least one IAM user exists in the account."""
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    # Get the list of IAM users.
    try:
        list_users = IAMUtils.get_list_users(trace_id, session, aws_account)
    except PmError as e:
        pm_logger.error("[%s] IAMユーザー一覧情報取得に失敗しました。", aws_account)
        return CheckResult.Error
    try:
        # Save the user list to S3 (resource information file).
        s3_file_name = CommonConst.PATH_CHECK_RAW.format(
            check_history_id, organization_id, project_id, aws_account,
            "IBP/IAM_ListUsers.json")
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", list_users,
                              s3_file_name)
    except PmError as e:
        pm_logger.error("[%s] IAMユーザー一覧情報のS3保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Check rule
    # Check-1: does any IAM user exist?
    try:
        if (len(list_users) == 0):
            result = {
                'Region': 'Global',
                'Level': CommonConst.LEVEL_CODE_21,
                'DetectionItem': {
                    'NoIAMUser': True
                }
            }
            check_results.append(result)
    except Exception as e:
        pm_logger.error("[%s] チェック処理中にエラーが発生しました。", aws_account)
        return CheckResult.Error
    # Export file CHECK_IBP_ITEM_02_01.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_ibp_item_02_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                              check_ibp_item_02_01, result_json_path)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Final verdict.
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def list_subscriptions_by_topic(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, region_name,
                                sns_client, topic_arn):
    """Return the subscriptions of an SNS topic, cached as a raw file on S3.

    Reads the cached raw file when present; otherwise fetches via the SNS
    API (attaching a notification-error payload on failure) and saves the
    result back to S3. Logs a warning when zero subscriptions are found.

    Raises:
        PmError: on read, fetch, or save failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    # The topic name is the sixth ARN component.
    topic_name = topic_arn.split(":")[5]
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "SNS_Topic_Subscriptions_Info_" + region_name + "_" + topic_name +
        ".json")
    subscriptions = []
    # Get resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            subscriptions = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        # Fetch the topic's subscription info per region
        # (GovCloud and Beijing excluded).
        try:
            subscriptions = SNSUtils.list_subscriptions_by_topic(
                trace_id, sns_client, awsaccount, region_name, topic_arn)
        except PmError as e:
            data_body = {'TopicArn': topic_arn}
            e.pm_notification_error = PmNotificationError(
                code_error=CommonConst.KEY_CODE_ERROR_GET_SUBSCRIPTIONS,
                data_body=data_body)
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Save the fetched subscription info to S3 (resource file).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", subscriptions,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error("[%s/%s] SNS Topicサブスクリプション情報の情報のS3保存に失敗しました。",
                            awsaccount, region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when nothing was retrieved.
    if (len(subscriptions) == 0):
        pm_logger.warning("[%s/%s] SNS Topicサブスクリプション情報情報の取得件数が0でした。",
                          awsaccount, region_name)
    return subscriptions
def describe_metric_filters(trace_id, check_history_id, organization_id,
                            project_id, awsaccount, region_name, logs_client,
                            cloud_trail_log_group_name):
    """Return the CloudWatch Logs metric filters for a CloudTrail log group.

    Uses the cached raw file on S3 when present; otherwise fetches via the
    Logs API (attaching a notification-error payload on failure) and saves
    the result to S3. Logs a warning when zero filters are found.

    Raises:
        PmError: on read, fetch, or save failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    # Slashes in the log group name are not valid in the file name.
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudWatch_Logs_Metric_Filters_Info_" + region_name + "_" +
        cloud_trail_log_group_name.replace(CommonConst.SLASH, "-SLASH-") +
        ".json")
    metric_filters = []
    # Get resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            metric_filters = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                 s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            metric_filters = LogsUtils.describe_metric_filters(
                trace_id, logs_client, awsaccount, region_name,
                cloud_trail_log_group_name)
        except PmError as e:
            data_body = {'CloudTrailLogGroupName': cloud_trail_log_group_name}
            e.pm_notification_error = PmNotificationError(
                code_error=CommonConst.KEY_CODE_ERROR_GET_METRIC_FILTERS,
                data_body=data_body)
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Save the fetched filters to S3.
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", metric_filters,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error("[%s/%s] メトリクスフィルタ情報のS3保存に失敗しました。", awsaccount,
                            region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when nothing was retrieved.
    if (len(metric_filters) == 0):
        pm_logger.warning("[%s/%s] メトリクスフィルタ情報の取得件数が0でした。", awsaccount,
                          region_name)
    return metric_filters
def test_upload_json_error_client(self):
    """upload_json wraps a boto3 client failure in PmError."""
    # Arrange an S3 client mock that raises.
    self.create_mock_boto3_client_error()
    expected_error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
    expected_operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
    FileUtils.global_s3_client = None
    with self.assertRaises(PmError) as exception:
        # Call the function under test.
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", data_file,
                              s3_file_name)
    # Assert the wrapped cause matches the stubbed client error.
    actual_cause_error = exception.exception.cause_error
    self.assertEqual(expected_error_response['Error'],
                     actual_cause_error.response['Error'])
    self.assertEqual(expected_operation_name,
                     actual_cause_error.operation_name)
def test_upload_json_error_call_json_dumps(self):
    """upload_json wraps a json.dumps failure in PmError."""
    # Arrange: json.dumps raises a ClientError.
    expected_error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
    expected_operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
    with patch.object(json, 'dumps') as mock_method:
        mock_method.side_effect = ClientError(
            error_response=expected_error_response,
            operation_name=expected_operation_name)
        with self.assertRaises(PmError) as exception:
            # Call the function under test.
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", data_file,
                                  s3_file_name)
        # Assert the wrapped cause matches the injected error.
        actual_cause_error = exception.exception.cause_error
        self.assertEqual(expected_error_response['Error'],
                         actual_cause_error.response['Error'])
        self.assertEqual(expected_operation_name,
                         actual_cause_error.operation_name)
def describe_alarms(trace_id, check_history_id, organization_id, project_id,
                    awsaccount, region_name, cloudwatch_client):
    """Return the CloudWatch metric alarms for a region, cached on S3.

    Uses the cached raw file when present; otherwise fetches via the
    CloudWatch API and saves the result to S3. Logs a warning when zero
    alarms are found.

    Raises:
        PmError: on read, fetch, or save failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudWatch_Alarm_Info_" + region_name + ".json")
    metric_alarms = []
    # Get resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            metric_alarms = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            metric_alarms = CloudWatchlUtils.describe_alarms(
                trace_id, awsaccount, cloudwatch_client, region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Save the fetched CloudWatch alarm info to S3 (resource file).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", metric_alarms,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error("[%s/%s] CloudWatchAlarmの情報のS3保存に失敗しました。",
                            awsaccount, region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when nothing was retrieved.
    if (len(metric_alarms) == 0):
        pm_logger.warning("[%s/%s] CloudWatchAlarm情報の取得件数が0でした。", awsaccount,
                          region_name)
    return metric_alarms
def get_cloud_trails(trace_id, check_history_id, organization_id, project_id,
                     awsaccount, region_name, session):
    """Return the CloudTrail trails for a region, cached as a raw file on S3.

    Uses the cached raw file when present; otherwise creates a regional
    trail client, fetches the trails, warns when none were found, and saves
    the result to S3.

    Raises:
        PmError: on read, fetch, or save failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudTrail_" + region_name + ".json")
    cloud_trails = []
    # Check whether CloudTrail_{region_name}.json already exists on S3.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            cloud_trails = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                               s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        # Fetch CloudTrail info per region (GovCloud and Beijing excluded).
        trail_client = CloudTrailUtils.get_trail_client(
            trace_id, session, region_name, awsaccount)
        try:
            cloud_trails = CloudTrailUtils.describe_cloud_trails(
                trace_id, awsaccount, trail_client, region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        if (len(cloud_trails) == 0):
            pm_logger.warning("[%s/%s] CloudTrail情報の取得件数が0でした。", awsaccount,
                              region_name)
        # Save the fetched CloudTrail info to S3 (resource file).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", cloud_trails,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error("[%s/%s] CloudTrailの情報のS3保存に失敗しました。", awsaccount,
                            region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    return cloud_trails
def check_asc_item_12_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """Check ASC item 12-01: S3 buckets publicly accessible via ACL/policy.

    Flags buckets whose ACL grants to AllUsers/AuthenticatedUsers or whose
    bucket policy allows all principals, exports the result JSON, and maps
    outcomes to CheckResult codes.
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    check_results = []
    is_authorized = True
    # Create an S3 client from the acquired credentials.
    try:
        s3_client = S3Utils.get_s3_client(trace_id, session, aws_account,
                                          is_cw_logger=True)
    except PmError as e:
        raise common_utils.write_log_pm_error(e, cw_logger)
    # Get the S3 bucket list.
    try:
        list_buckets = S3Utils.list_buckets(trace_id, s3_client, aws_account,
                                            is_cw_logger=True)
    except PmError as e:
        return CheckResult.Error
    # Save the bucket list to S3.
    try:
        s3_file_name = CommonConst.PATH_CHECK_RAW.format(
            check_history_id, organization_id, project_id, aws_account,
            "ASC/S3_ListBuckets.json")
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", list_buckets,
                              s3_file_name, is_cw_logger=True)
    except PmError as e:
        cw_logger.error("[%s] S3バケット一覧情報のS3保存に失敗しました。", aws_account)
        return CheckResult.Error
    for bucket in list_buckets["Buckets"]:
        bucket_name = bucket['Name']
        region_name = None
        try:
            # Resolve the bucket's region from the bucket list.
            region_name = S3Utils.get_bucket_location(trace_id, s3_client,
                                                      bucket_name,
                                                      aws_account,
                                                      is_cw_logger=True)
            if region_name is None:
                region_name = CommonConst.US_EAST_REGION
            # Get the bucket's access control list.
            bucket_acl = S3Utils.get_bucket_acl(trace_id, s3_client,
                                                bucket_name, aws_account,
                                                region_name)
            # Get the bucket's bucket policy.
            bucket_policy = S3Utils.get_bucket_policy(trace_id, s3_client,
                                                      bucket_name,
                                                      aws_account,
                                                      region_name,
                                                      is_cw_logger=True)
            if bucket_policy is None:
                continue
        except PmError as e:
            if e.cause_error.response['Error'][
                    'Code'] in CommonConst.S3_SKIP_EXCEPTION:
                # NOTE(review): the trailing commas make these 1-tuples —
                # preserved as-is; confirm downstream expects that shape.
                error_operation = e.cause_error.operation_name,
                error_code = e.cause_error.response['Error']['Code'],
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation,
                        error_code, error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error
        # Save the bucket's ACL info to S3 (access control list file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_ACL_" + region_name + "_" + bucket_name + ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_acl,
                                  s3_file_name, is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s] S3バケットACL情報のS3保存に失敗しました。(%s)/(%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
        # Save the bucket's policy info to S3 (bucket policy file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_Bucketpolicy_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_policy,
                                  s3_file_name, is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s] S3バケットポリシー情報のS3保存に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
        # Check processing.
        bucket_acl_abnormity = False
        bucket_policy_abnormity = False
        try:
            # Check-1: ACL grants to AllUsers
            # Check-2: ACL grants to AuthenticatedUsers
            for grant in bucket_acl["Grants"]:
                if (common_utils.check_key("URI", grant['Grantee'])):
                    if grant['Grantee']["URI"] in ACL_URI:
                        bucket_acl_abnormity = True
                        break
            # Check-3: bucket policy principal grants to all users
            bucket_policy = ast.literal_eval(bucket_policy['Policy'])
            for statement in bucket_policy["Statement"]:
                if (statement["Effect"] == CommonConst.ALLOW and
                        statement["Principal"] == CommonConst.ALL):
                    bucket_policy_abnormity = True
                    break
            if bucket_acl_abnormity is True or bucket_policy_abnormity is True:
                check_results.append(
                    get_check_accessible_result(region_name,
                                                bucket_acl_abnormity,
                                                bucket_policy_abnormity,
                                                bucket_name))
        except Exception as e:
            cw_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)", aws_account,
                            region_name, bucket_name)
            return CheckResult.Error
    # Export file CHECK_ASC_ITEM_12_01.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_bucket = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_bucket, result_json_path,
                            format_json=True, is_cw_logger=True)
    except Exception as e:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Final verdict.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def check_asc_item_16_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """Check ASC item 16-01: S3 buckets without access logging enabled.

    A bucket is flagged when its ACL has no LogDelivery grant AND it has
    no bucket-logging configuration. Results are exported as JSON and
    mapped to CheckResult codes.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    is_authorized = True
    # Create an S3 client from the acquired credentials.
    try:
        s3_client = S3Utils.get_s3_client(trace_id, session, aws_account)
    except PmError as e:
        raise common_utils.write_log_pm_error(e, pm_logger)
    # Get the S3 bucket list.
    try:
        list_buckets = asc_item_common_logic.get_list_buckets(
            trace_id, check_history_id, organization_id, project_id,
            s3_client, aws_account)
    except PmError as e:
        return CheckResult.Error
    for bucket in list_buckets["Buckets"]:
        bucket_name = bucket['Name']
        region_name = None
        try:
            # Resolve the bucket's region from the bucket list.
            region_name = S3Utils.get_bucket_location(trace_id, s3_client,
                                                      bucket_name,
                                                      aws_account)
            if region_name is None:
                region_name = CommonConst.US_EAST_REGION
            # Get the bucket's access control list.
            bucket_acl = get_bucket_acl(trace_id, check_history_id,
                                        organization_id, project_id,
                                        aws_account, region_name,
                                        bucket_name, s3_client)
            # Get the bucket's logging configuration.
            bucket_logging = S3Utils.get_bucket_logging(
                trace_id, aws_account, s3_client, bucket_name, region_name)
        except PmError as e:
            if e.cause_error.response['Error'][
                    'Code'] in CommonConst.S3_SKIP_EXCEPTION:
                # NOTE(review): the trailing commas make these 1-tuples —
                # preserved as-is; confirm downstream expects that shape.
                error_operation = e.cause_error.operation_name,
                error_code = e.cause_error.response['Error']['Code'],
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation,
                        error_code, error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error
        # Save the logging info to S3 (resource information file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_ClientLogging_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_logging,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error("[%s] S3バケットロギング情報の取得に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
        # Check processing.
        bucket_abnormity = True
        try:
            # Check-1: does the ACL grant LogDelivery?
            for grant in bucket_acl["Grants"]:
                if (common_utils.check_key("URI", grant['Grantee']) and
                        grant['Grantee']["URI"] == LOG_DELIVERY_URI):
                    bucket_abnormity = False
                    break
            # Check-2: is bucket logging disabled?
            if bucket_abnormity is True and len(bucket_logging) == 0:
                result = {
                    'Region': region_name,
                    'Level': CommonConst.LEVEL_CODE_21,
                    'DetectionItem': {
                        'BucketName': bucket_name
                    }
                }
                check_results.append(result)
        except Exception as e:
            pm_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)", aws_account,
                            region_name, bucket_name)
            return CheckResult.Error
    # Export file CHECK_ASC_ITEM_16_01.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_asc_item_16_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_asc_item_16_01, result_json_path,
                            True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Final verdict.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def execute_check_results_assessment(trace_id, check_item_code,
                                     check_history_id, organization_id,
                                     project_id, aws_account,
                                     result_json_path, level):
    """Build and export the check-result JSON for a manual-assessment item.

    Loads assessment records (from the cached S3 raw file or DynamoDB),
    looks up the record for `check_item_code`, and writes a result JSON.
    When no assessment exists, a 'NoEvaluation' detection is recorded at a
    severity derived from `level`.

    Raises:
        PmError: on any failure; a notification-error payload is attached.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(check_history_id,
                                                     organization_id,
                                                     project_id, aws_account,
                                                     "Assessment_Result.json")
    try:
        # Get resource information.
        if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                            s3_file_name)) is True:
            try:
                assessment_items = FileUtils.read_json(
                    trace_id, "S3_CHECK_BUCKET", s3_file_name)
            except PmError as e:
                raise common_utils.write_log_pm_error(e, pm_logger)
        else:
            try:
                assessment_items = pm_assessmentItems.query_organization_index_filter_awsaccount(
                    trace_id, organization_id, project_id, aws_account)
            except PmError as e:
                raise common_utils.write_log_pm_error(e, pm_logger)
            # Save the query result to S3 (resource information file).
            # Best-effort: a save failure is logged but not fatal.
            try:
                FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                      assessment_items, s3_file_name)
            except PmError as e:
                # FIX: corrected message typo 「ファイ」→「ファイル」.
                pm_logger.error("[%s] リソース情報ファイルのS3保存に失敗しました。", aws_account)
        # Check processing.
        try:
            check_results = []
            assessment_result = None
            for assessment_item in assessment_items:
                if (assessment_item['CheckItemCode'] == check_item_code):
                    assessment_result = {
                        'AssessmentComment':
                        common_utils.get_value('AssessmentComment',
                                               assessment_item),
                        'UserID':
                        common_utils.get_value('UserID', assessment_item),
                        'MailAddress':
                        common_utils.get_value('MailAddress',
                                               assessment_item),
                        'CreatedAt': assessment_item['CreatedAt']
                    }
                    break
            if (assessment_result is None):
                LEVEL_DIVISION = {
                    "1": CommonConst.LEVEL_CODE_21,  # critical defect
                    "2": CommonConst.LEVEL_CODE_11  # minor defect
                }
                result = {
                    'Region': 'Global',
                    'Level': LEVEL_DIVISION[level],
                    'DetectionItem': {
                        'NoEvaluation': True
                    }
                }
                check_results.append(result)
        except Exception as e:
            pm_logger.error("[%s] チェック処理中にエラーが発生しました。", aws_account)
            raise common_utils.write_log_exception(e, pm_logger)
        # Write the check-result JSON file.
        try:
            current_date = date_utils.get_current_date_by_format(
                date_utils.PATTERN_YYYYMMDDHHMMSS)
            check_result_json = {
                'AWSAccount': aws_account,
                'CheckResults': check_results,
                'DateTime': current_date
            }
            if assessment_result is not None:
                check_result_json['AssessmentResult'] = assessment_result
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  check_result_json, result_json_path)
        except Exception as e:
            pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
            raise common_utils.write_log_exception(e, pm_logger)
    except Exception as e:
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Final verdict.
    if len(check_results) == 0:
        return CheckResult.Normal
    elif (level == CommonConst.LEVEL_CODE_1):
        return CheckResult.CriticalDefect
    return CheckResult.MinorInadequacies
def job_report(trace_id, email, report_id, code, job_id):
    """Submit a report-generation AWS Batch job and record its job log.

    Returns a (response, job_id) pair; on failure the first element is an
    error response and the second is None.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    try:
        report_job_def = pm_batchJobDefs.query_report_job_def_key(
            trace_id, code)
    except PmError as e:
        return common_utils.error_exception(MsgConst.ERR_402,
                                            HTTPStatus.INTERNAL_SERVER_ERROR,
                                            e, pm_logger, True), None
    if not report_job_def:
        pm_logger.error("ジョブ定義情報が取得できませんでした。:" + code)
        return common_utils.error_common(MsgConst.ERR_402,
                                         HTTPStatus.INTERNAL_SERVER_ERROR,
                                         pm_logger), None
    # Log ID (UUID v4).
    log_id = common_utils.get_uuid4()
    # AWS Batch submission parameters.
    job_name = code + "-" + log_id
    job_queue = report_job_def['JobQueue']
    job_definition = report_job_def['JobDefinition']
    parameters = {
        "ReportID": "--reportId=" + report_id,
        "LogID": "--logId=" + log_id
    }
    container_overrides = {}
    if (common_utils.check_key('Environment', report_job_def) and
            len(report_job_def['Environment']) > 0):
        container_overrides = report_job_def['Environment']
    max_retry = report_job_def['MaxRetry']
    try:
        # Submit the job.
        job_id, parameter = aws_common.submit_job(trace_id, job_name,
                                                  job_queue, job_id,
                                                  job_definition, parameters,
                                                  container_overrides,
                                                  max_retry)
    except PmError as e:
        return common_utils.error_exception(MsgConst.ERR_AWS_601,
                                            HTTPStatus.INTERNAL_SERVER_ERROR,
                                            e, pm_logger, True), None
    # Create the ReportJobLogs record.
    user_id = trace_id
    # The job id comes back wrapped in a list; unwrap it (interim measure).
    job_id_key = job_id[0]['jobId']
    date_now = common_utils.get_current_date()
    try:
        report_log = {
            'ReportID': report_id,
            'LogID': log_id,
            'Code': code,
            'UserID': user_id,
            'MailAddress': email,
            'JobID': job_id_key,
            'Parameter': parameter,
            'CreatedAt': date_now,
            'UpdatedAt': date_now
        }
        s3_file_name = CommonConst.PATH_REPORT_BATCH.format(
            report_id, log_id + ".json")
        FileUtils.upload_json(trace_id, "S3_BATCH_LOG_BUCKET", report_log,
                              s3_file_name)
    except PmError as e:
        return common_utils.error_exception(MsgConst.ERR_S3_701,
                                            HTTPStatus.INTERNAL_SERVER_ERROR,
                                            e, pm_logger, True), None
    return common_utils.response(None, pm_logger), job_id
def job_check_security(trace_id, project_id, check_history_id, code, job_id):
    """Submit a security-check AWS Batch job and record its job log on S3.

    Returns (True, job_id) on success, (False, None) on any failure.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    try:
        batch_job_def = pm_batchJobDefs.query_report_job_def_key(
            trace_id, code)
    except PmError as e:
        pm_logger.error("バッチジョブ定義の取得に失敗しました。: %s", code)
        pm_logger.error(e)
        return False, None
    if not batch_job_def:
        pm_logger.error("ジョブ定義情報が取得できませんでした。:%s", code)
        return False, None
    # Log ID (UUID v4).
    log_id = common_utils.get_uuid4()
    # AWS Batch submission parameters.
    job_name = code + "--" + check_history_id + "--" + log_id
    job_queue = batch_job_def['JobQueue']
    job_definition = batch_job_def['JobDefinition']
    check_history_id_param = check_history_id
    log_id_param = log_id
    # The report job expects CLI-style flags.
    if (code == 'CHECK_SECURITY_REPORT'):
        check_history_id_param = "--checkHistoryId=" + check_history_id
        log_id_param = "--logId=" + log_id
    parameters = {
        "CheckHistoryID": check_history_id_param,
        "LogID": log_id_param
    }
    container_overrides = {}
    if (common_utils.check_key('Environment', batch_job_def) and
            len(batch_job_def['Environment']) > 0):
        container_overrides = batch_job_def['Environment']
    max_retry = batch_job_def['MaxRetry']
    try:
        # Submit the job.
        job_id, parameter = aws_common.submit_job(trace_id, job_name,
                                                  job_queue, job_id,
                                                  job_definition, parameters,
                                                  container_overrides,
                                                  max_retry)
    except PmError as e:
        pm_logger.error("セキュリティチェック処理ジョブ(%s)の送信に失敗しました。: ProjectID=%s",
                        code, project_id)
        return False, None
    # Create the security-check job history file and save it to S3.
    job_id_key = job_id[0]['jobId']
    date_now = common_utils.get_current_date()
    try:
        check_log = {
            'ProjectID': project_id,
            'CheckHistoryID': check_history_id,
            'LogID': log_id,
            'Code': code,
            'UserID': 'Check-Job-Launcher',
            'MailAddress': 'Check-Job-Launcher',
            'JobID': job_id_key,
            'Parameter': parameter,
            'CreatedAt': date_now,
            'UpdatedAt': date_now
        }
        s3_file_name = CommonConst.PATH_BATCH_CHECK_LOG.format(
            check_history_id, log_id + ".json")
        FileUtils.upload_json(trace_id, "S3_BATCH_LOG_BUCKET", check_log,
                              s3_file_name)
    except PmError as e:
        pm_logger.error(
            "セキュリティチェックジョブ履歴ファイルの作成に失敗しました。: ProjectID=%s, LogID=%s",
            project_id, log_id)
        return False, None
    return True, job_id
def check_asc_item_13_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """Check ASC item 13-01: S3 buckets without default encryption.

    Buckets with no server-side-encryption configuration, or whose rules
    lack an SSEAlgorithm, are flagged. Results are exported as JSON and
    mapped to CheckResult codes.
    """
    cw_logger = common_utils.begin_cw_logger(trace_id, __name__,
                                             inspect.currentframe())
    check_results = []
    is_authorized = True
    s3_client = S3Utils.get_s3_client(trace_id, session, aws_account,
                                      is_cw_logger=True)
    try:
        list_buckets = asc_item_common_logic.get_list_buckets(
            trace_id, check_history_id, organization_id, project_id,
            s3_client, aws_account)
    except PmError as e:
        return CheckResult.Error
    # Collect and persist each bucket's encryption configuration.
    for bucket in list_buckets['Buckets']:
        bucket_name = bucket['Name']
        region_name = None
        try:
            region_name = S3Utils.get_bucket_location(
                trace_id, s3_client, bucket_name, aws_account)
            if region_name is None:
                region_name = CommonConst.US_EAST_REGION
            bucket_encryption_rules = S3Utils.get_bucket_encryption(
                trace_id, s3_client, bucket_name, aws_account, region_name,
                is_cw_logger=True)
        except PmError as e:
            if e.cause_error.response['Error'][
                    'Code'] in CommonConst.SERVER_SIDE_ENCRYPTION_CONFIGURATION_NOT_FOUND_ERROR:
                # No encryption configuration at all -> flagged.
                check_results.append(get_check_asc_item_13_01_result(
                    region_name, bucket_name))
                continue
            elif e.cause_error.response['Error'][
                    'Code'] in CommonConst.S3_SKIP_EXCEPTION:
                # NOTE(review): the trailing commas make these 1-tuples —
                # preserved as-is; confirm downstream expects that shape.
                error_operation = e.cause_error.operation_name,
                error_code = e.cause_error.response['Error']['Code'],
                error_message = e.cause_error.response['Error']['Message']
                if region_name is None:
                    region_name = CommonConst.ERROR
                check_results.append(
                    asc_item_common_logic.get_error_authorized_result(
                        region_name, bucket_name, error_operation,
                        error_code, error_message))
                is_authorized = False
                continue
            else:
                return CheckResult.Error
        if len(bucket_encryption_rules) == 0:
            continue
        # Save the encryption rules to S3 (resource information file).
        try:
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, aws_account,
                "ASC/S3_Encryption_" + region_name + "_" + bucket_name +
                ".json")
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  bucket_encryption_rules, s3_file_name,
                                  is_cw_logger=True)
        except PmError as e:
            cw_logger.error("[%s] S3バケット暗号化情報のS3保存に失敗しました。(%s/%s)",
                            aws_account, region_name, bucket_name)
            return CheckResult.Error
        # Check processing: every rule must name an SSEAlgorithm.
        try:
            for bucket_encryption_rule in bucket_encryption_rules:
                if (common_utils.check_key(
                        "SSEAlgorithm",
                        bucket_encryption_rule['ApplyServerSideEncryptionByDefault']
                ) is False):
                    check_results.append(get_check_asc_item_13_01_result(
                        region_name, bucket_name))
                    break
        except Exception as e:
            cw_logger.error("[%s] チェック処理中にエラーが発生しました。(%s/%s)", aws_account,
                            region_name, bucket_name)
            return CheckResult.Error
    # Export the result JSON file.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_asc_item_13_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_asc_item_13_01, result_json_path,
                            format_json=True, is_cw_logger=True)
    except Exception as e:
        cw_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Final verdict.
    if is_authorized is False:
        return CheckResult.Error
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal
def check_ibp_item_14_01(trace_id, check_history_id, organization_id,
                         project_id, aws_account, session, result_json_path):
    """Check IBP item 14-01: CloudFront distributions with logging disabled.

    Saves the distribution list and each distribution's configuration to
    S3, flags distributions whose logging is disabled, exports the result
    JSON, and maps outcomes to CheckResult codes.
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    cloud_front_client = CloudFrontUtils.get_cloud_front_client(
        trace_id, session, aws_account)
    # Get the list of CloudFront distributions.
    try:
        distribution_list = CloudFrontUtils.get_list_distributions(
            trace_id, cloud_front_client, aws_account)
    except PmError as e:
        return CheckResult.Error
    # Save the distribution list to S3 (distribution list file).
    try:
        s3_file_name = CommonConst.PATH_CHECK_RAW.format(
            check_history_id, organization_id, project_id, aws_account,
            "IBP/CloudFront_list_distributions.json")
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", distribution_list,
                              s3_file_name)
    except PmError as e:
        pm_logger.error("[%s] ディストリビューション一覧情報のS3保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Check-1: does any CloudFront distribution exist?
    is_exist_distribution_cloudFront = False
    try:
        if len(distribution_list) > 0:
            is_exist_distribution_cloudFront = True
    except Exception as e:
        pm_logger.error("[%s] チェック処理中にエラーが発生しました。", aws_account)
        return CheckResult.Error
    if is_exist_distribution_cloudFront is True:
        for distribution in distribution_list:
            distribution_id = distribution['Id']
            # Fetch each distribution's details from the list.
            try:
                info_distribution = CloudFrontUtils.get_distribution(
                    trace_id, cloud_front_client, distribution_id,
                    aws_account)
            except PmError as e:
                return CheckResult.Error
            # Store the distribution details on S3.
            try:
                s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                    check_history_id, organization_id, project_id,
                    aws_account,
                    "IBP/CloudFront_" + distribution_id + ".json")
                FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                      info_distribution, s3_file_name)
            except PmError as e:
                pm_logger.error("[%s] ディストリビューション情報のS3保存に失敗しました。(%s)",
                                aws_account, distribution_id)
                return CheckResult.Error
            # Check-2: is logging disabled for this distribution?
            try:
                if info_distribution['DistributionConfig']['Logging'][
                        'Enabled'] is False:
                    check_results.append(
                        get_check_ibp_item_14_01_result(distribution_id))
            except Exception as e:
                pm_logger.error("[%s] チェック処理中にエラーが発生しました。(%s)", aws_account,
                                distribution_id)
                return CheckResult.Error
    # Export file CHECK_IBP_ITEM_14_01.json
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_ibp_item_14_01 = {
            'AWSAccount': aws_account,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                              check_ibp_item_14_01, result_json_path)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", aws_account)
        return CheckResult.Error
    # Final verdict.
    if len(check_results) > 0:
        return CheckResult.CriticalDefect
    return CheckResult.Normal