def get_bucket_acl(trace_id, check_history_id, organization_id, project_id,
                   aws_account, region_name, bucket_name, s3_client):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "ASC/S3_ACL_" + region_name + "_" + bucket_name + ".json")
    # Get the resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            bucket_acl = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                             s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            bucket_acl = S3Utils.get_bucket_acl(trace_id, s3_client,
                                                bucket_name, aws_account,
                                                region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Save the retrieved S3 bucket access control list to S3
        # (access control list information).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", bucket_acl,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error(
                "[%s] Failed to save the S3 bucket ACL information to S3. (%s)/(%s)",
                aws_account, region_name, bucket_name)
    return bucket_acl

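# The read-from-S3-cache-or-fetch-then-upload pattern above repeats in every
# collector in this module. A minimal sketch of how it could be factored into
# a shared helper follows; "fetch_resource_cached" and its parameters are
# hypothetical names, not part of the existing codebase.
def fetch_resource_cached(trace_id, s3_file_name, fetch, pm_logger,
                          error_message):
    # Reuse the raw resource file if a previous run already saved it to S3.
    if aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                       s3_file_name) is True:
        try:
            return FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                       s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Otherwise call the AWS API through the supplied callback ...
    try:
        resource = fetch()
    except PmError as e:
        raise common_utils.write_log_pm_error(e, pm_logger)
    # ... and persist the result so later check items can reuse it.
    try:
        FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", resource,
                              s3_file_name)
    except PmError as e:
        pm_logger.error(error_message)
        raise common_utils.write_log_pm_error(e, pm_logger)
    return resource
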
def get_list_buckets(trace_id, check_history_id, organization_id, project_id,
                     s3_client, aws_account):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "ASC/S3_ListBuckets.json")
    # Get the resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            list_buckets = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                               s3_file_name)
        except PmError as e:
            raise common_utils.write_log_exception(e, pm_logger)
    else:
        # Get the list of S3 buckets.
        try:
            list_buckets = S3Utils.list_buckets(trace_id, s3_client,
                                                aws_account)
        except PmError as e:
            raise common_utils.write_log_exception(e, pm_logger)
        # Save the S3 bucket list information to S3.
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", list_buckets,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error(
                "[%s] Failed to save the S3 bucket list information to S3.",
                aws_account)
            raise common_utils.write_log_exception(e, pm_logger)
    return list_buckets

def get_account_password_policy(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, session,
                                result_json_path):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "IBP/IAM_AccountPasswordPolicy.json")
    # Get the resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            account_password_policy = FileUtils.read_json(
                trace_id, "S3_CHECK_BUCKET", s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            account_password_policy = IAMUtils.get_account_password_policy(
                trace_id, session, awsaccount)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Save the account password policy information to S3.
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  account_password_policy, s3_file_name)
        except PmError as e:
            pm_logger.error(
                "[%s] Failed to save the account password policy information to S3.",
                awsaccount)
            raise common_utils.write_log_pm_error(e, pm_logger)
    return account_password_policy

def test_check_exists_file_s3_success_case_exists_file_s3(self):
    # Connect to S3.
    resource_s3 = s3_utils.resource_connect()
    client_s3 = s3_utils.client_connect()
    check_bucket = common_utils.get_environ("S3_CHECK_BUCKET")
    # Prepare test data.
    resource_s3.create_bucket(Bucket=check_bucket)
    client_s3.put_object(
        Body=json.dumps(copy.deepcopy(DataTestS3.INFO_BUCKET)),
        Bucket=check_bucket,
        Key=s3_file_name)
    mybucket = resource_s3.Bucket(name=check_bucket)
    with patch.object(boto3, 'resource') as mock_method_resource:
        mock_method_resource.return_value = resource_s3
        with patch.object(resource_s3, 'Bucket') as mock_method_bucket:
            mock_method_bucket.return_value = mybucket
            # Call the function under test.
            actual_response = aws_common.check_exists_file_s3(
                trace_id, "S3_CHECK_BUCKET", s3_file_name)
    # Check the result.
    expected_response = True
    self.assertEqual(expected_response, actual_response)
    # Check the resource connection.
    mock_method_resource.assert_called_with('s3')

def get_trail_event_selectors(trace_id, check_history_id, organization_id,
                              project_id, awsaccount, region_name,
                              trail_client, cloud_trail):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudTrail_EventSelectors_" + region_name + "_" +
        cloud_trail["Name"] + ".json")
    event_selectors = []
    # Check whether the resource information file already exists in S3.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            event_selectors = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                  s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        event_selectors = CloudTrailUtils.get_event_selectors(
            trace_id, awsaccount, trail_client, region_name,
            cloud_trail["TrailARN"])
        # Upload the resource information file to S3.
        try:
            FileUtils.upload_s3(trace_id, event_selectors, s3_file_name, True)
        except PmError as e:
            pm_logger.error(
                "[%s/%s] Failed to save the CloudTrail event selector information to S3.",
                awsaccount, region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    return event_selectors

def test_check_exists_file_s3_success_case_not_exists_file_s3(self):
    # Connect to S3.
    resource_s3 = s3_utils.resource_connect()
    check_bucket = common_utils.get_environ("S3_CHECK_BUCKET")
    s3_file_name_not_exists = "check_history_id/organization_id/project_id/awsaccount/raw/filenamenotexists.json"
    # Prepare test data.
    resource_s3.create_bucket(Bucket=check_bucket)
    mybucket = resource_s3.Bucket(name=check_bucket)
    with patch.object(boto3, 'resource') as mock_method_resource:
        mock_method_resource.return_value = resource_s3
        with patch.object(resource_s3, 'Bucket') as mock_method_bucket:
            mock_method_bucket.return_value = mybucket
            # Call the function under test.
            actual_response = aws_common.check_exists_file_s3(
                trace_id, "S3_CHECK_BUCKET", s3_file_name_not_exists)
    # Check the result.
    expected_response = False
    self.assertEqual(expected_response, actual_response)
    # Check the resource connection.
    mock_method_resource.assert_called_with('s3')

def test_check_exists_file_s3_error_resource(self):
    expected_error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
    expected_operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
    # Create a mock that raises an error from the boto3 S3 resource.
    self.create_mock_boto3_resource_error()
    with self.assertRaises(PmError) as exception:
        # Call the function under test.
        aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)
    # Check the error.
    actual_cause_error = exception.exception.cause_error
    self.assertEqual(expected_error_response['Error'],
                     actual_cause_error.response['Error'])
    self.assertEqual(expected_operation_name,
                     actual_cause_error.operation_name)

def test_check_exists_file_s3_error_call_bucket(self):
    # Connect to S3.
    resource_connect_s3 = s3_utils.resource_connect()
    expected_error_response = copy.deepcopy(DataCommon.ERROR_RESPONSE)
    expected_operation_name = copy.deepcopy(DataCommon.OPERATION_NAME)
    # Create a mock that throws an error when the Bucket function is called.
    with patch.object(resource_connect_s3, 'Bucket') as mock_method_bucket:
        mock_method_bucket.side_effect = ClientError(
            error_response=expected_error_response,
            operation_name=expected_operation_name)
        with self.assertRaises(PmError) as exception:
            # Call the function under test.
            aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                            s3_file_name)
    # Check the error.
    actual_cause_error = exception.exception.cause_error
    self.assertEqual(expected_error_response['Error'],
                     actual_cause_error.response['Error'])
    self.assertEqual(expected_operation_name,
                     actual_cause_error.operation_name)

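# For reference when reading the tests above: a plausible shape of
# aws_common.check_exists_file_s3 consistent with what they mock (a
# boto3.resource('s3') call, a Bucket() call, and a ClientError wrapped into
# a PmError carrying a "cause_error" attribute). This sketch is an
# assumption, not the actual source; in particular the PmError constructor
# signature is guessed from how the tests read exception.exception.cause_error.
def check_exists_file_s3(trace_id, bucket_environ_key, s3_file_name):
    try:
        resource_s3 = boto3.resource('s3')
        bucket = resource_s3.Bucket(
            common_utils.get_environ(bucket_environ_key))
        # Filtering by the key as prefix yields the object when it exists;
        # an empty iterator means the file is absent.
        for s3_object in bucket.objects.filter(Prefix=s3_file_name):
            if s3_object.key == s3_file_name:
                return True
        return False
    except ClientError as e:
        # Hypothetical wrapping; the real code may build PmError differently.
        raise PmError(cause_error=e)
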
def list_subscriptions_by_topic(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, region_name,
                                sns_client, topic_arn):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    topic_name = topic_arn.split(":")[5]
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "SNS_Topic_Subscriptions_Info_" + region_name + "_" + topic_name +
        ".json")
    subscriptions = []
    # Get the resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            subscriptions = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        # Get the subscription information for the target SNS topic.
        try:
            subscriptions = SNSUtils.list_subscriptions_by_topic(
                trace_id, sns_client, awsaccount, region_name, topic_arn)
        except PmError as e:
            data_body = {'TopicArn': topic_arn}
            e.pm_notification_error = PmNotificationError(
                code_error=CommonConst.KEY_CODE_ERROR_GET_SUBSCRIPTIONS,
                data_body=data_body)
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Save the retrieved SNS topic subscription information to S3
        # (resource information file).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", subscriptions,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error(
                "[%s/%s] Failed to save the SNS topic subscription information to S3.",
                awsaccount, region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when no information was retrieved.
    if (len(subscriptions) == 0):
        pm_logger.warning(
            "[%s/%s] No SNS topic subscription information was retrieved.",
            awsaccount, region_name)
    return subscriptions

def describe_metric_filters(trace_id, check_history_id, organization_id,
                            project_id, awsaccount, region_name, logs_client,
                            cloud_trail_log_group_name):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudWatch_Logs_Metric_Filters_Info_" + region_name + "_" +
        cloud_trail_log_group_name.replace(CommonConst.SLASH, "-SLASH-") +
        ".json")
    metric_filters = []
    # Get the resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            metric_filters = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                 s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            metric_filters = LogsUtils.describe_metric_filters(
                trace_id, logs_client, awsaccount, region_name,
                cloud_trail_log_group_name)
        except PmError as e:
            data_body = {'CloudTrailLogGroupName': cloud_trail_log_group_name}
            e.pm_notification_error = PmNotificationError(
                code_error=CommonConst.KEY_CODE_ERROR_GET_METRIC_FILTERS,
                data_body=data_body)
            raise common_utils.write_log_pm_error(e, pm_logger)
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", metric_filters,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error(
                "[%s/%s] Failed to save the metric filter information to S3.",
                awsaccount, region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when no information was retrieved.
    if (len(metric_filters) == 0):
        pm_logger.warning(
            "[%s/%s] No metric filter information was retrieved.",
            awsaccount, region_name)
    return metric_filters

def describe_alarms(trace_id, check_history_id, organization_id, project_id,
                    awsaccount, region_name, cloudwatch_client):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudWatch_Alarm_Info_" + region_name + ".json")
    metric_alarms = []
    # Get the resource information.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            metric_alarms = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        try:
            metric_alarms = CloudWatchlUtils.describe_alarms(
                trace_id, awsaccount, cloudwatch_client, region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        # Save the retrieved CloudWatch alarm information to S3
        # (resource information file).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", metric_alarms,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error(
                "[%s/%s] Failed to save the CloudWatch alarm information to S3.",
                awsaccount, region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    # Warn when no information was retrieved.
    if (len(metric_alarms) == 0):
        pm_logger.warning(
            "[%s/%s] No CloudWatch alarm information was retrieved.",
            awsaccount, region_name)
    return metric_alarms

def get_cloud_trails(trace_id, check_history_id, organization_id, project_id,
                     awsaccount, region_name, session):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, awsaccount,
        "CloudTrail_" + region_name + ".json")
    cloud_trails = []
    # Check whether CloudTrail_{region_name}.json already exists in S3.
    if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                        s3_file_name)) is True:
        try:
            cloud_trails = FileUtils.read_json(trace_id, "S3_CHECK_BUCKET",
                                               s3_file_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
    else:
        # Get the CloudTrail information for each region of the target AWS
        # account (excluding GovCloud and Beijing).
        trail_client = CloudTrailUtils.get_trail_client(
            trace_id, session, region_name, awsaccount)
        try:
            cloud_trails = CloudTrailUtils.describe_cloud_trails(
                trace_id, awsaccount, trail_client, region_name)
        except PmError as e:
            raise common_utils.write_log_pm_error(e, pm_logger)
        if (len(cloud_trails) == 0):
            pm_logger.warning(
                "[%s/%s] No CloudTrail information was retrieved.",
                awsaccount, region_name)
        # Save the retrieved CloudTrail information to S3
        # (resource information file).
        try:
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET", cloud_trails,
                                  s3_file_name)
        except PmError as e:
            pm_logger.error(
                "[%s/%s] Failed to save the CloudTrail information to S3.",
                awsaccount, region_name)
            raise common_utils.write_log_pm_error(e, pm_logger)
    return cloud_trails

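# A sketch of how the two CloudTrail collectors above would typically be
# chained inside a per-region loop. The wrapper function itself is
# illustrative only; REGION_IGNORE and the utility modules are the ones the
# surrounding code already uses.
def collect_cloud_trail_data(trace_id, check_history_id, organization_id,
                             project_id, awsaccount, session):
    regions = aws_common.get_regions(trace_id, session)
    for region in regions:
        region_name = region["RegionName"]
        if region_name in REGION_IGNORE:
            continue
        # Trails are fetched (or read from the S3 cache) per region.
        trails = get_cloud_trails(trace_id, check_history_id, organization_id,
                                  project_id, awsaccount, region_name, session)
        trail_client = CloudTrailUtils.get_trail_client(
            trace_id, session, region_name, awsaccount)
        for cloud_trail in trails:
            # Each trail's event selectors are cached under a file name that
            # embeds the region and the trail name.
            get_trail_event_selectors(trace_id, check_history_id,
                                      organization_id, project_id, awsaccount,
                                      region_name, trail_client, cloud_trail)
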
def execute_security_group_port(trace_id, check_history_id, organization_id,
                                project_id, awsaccount, session,
                                result_json_path, port, check_item_code,
                                excluded_resources):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    # Export the VPC_SecurityGroups_{region}.json file.
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Failed to get the region information.")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
                if (len(security_groups) == 0):
                    pm_logger.info(
                        "[%s/%s] No security group information was retrieved.",
                        awsaccount, region_name)
                    continue
                try:
                    FileUtils.upload_s3(trace_id, security_groups,
                                        s3_file_name, True)
                except PmError as e:
                    pm_logger.error(
                        "[%s/%s] Failed to save the security group information to S3.",
                        awsaccount, region_name)
                    raise common_utils.write_log_pm_error(e, pm_logger)
            try:
                for security_group in security_groups:
                    # Skip excluded resources.
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name,
                            ResourceType.GroupId, resource_name,
                            excluded_resources):
                        continue
                    for ip_permission in security_group['IpPermissions']:
                        # Protocol '-1' means all traffic; otherwise only TCP
                        # rules whose port range covers the checked port count.
                        if ip_permission['IpProtocol'] != '-1':
                            if ip_permission['IpProtocol'] != CommonConst.TCP:
                                continue
                            if common_utils.check_key(
                                    'FromPort', ip_permission
                            ) is False or ip_permission['FromPort'] > port:
                                continue
                            if common_utils.check_key(
                                    'ToPort', ip_permission
                            ) is False or ip_permission['ToPort'] < port:
                                continue
                        for ip_range in ip_permission['IpRanges']:
                            if common_utils.check_key('CidrIp', ip_range):
                                if (CommonConst.CIDR_IP_NOT_SECURITY ==
                                        ip_range['CidrIp']):
                                    check_result = get_check_result(
                                        security_group, ip_permission,
                                        ip_range, region_name)
                                    check_results.append(check_result)
                                    break
            except Exception as e:
                pm_logger.error(
                    "[%s/%s] An error occurred during the check processing.",
                    awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)
    result_security_group = CheckResult.Normal
    if (len(check_results) > 0):
        result_security_group = CheckResult.CriticalDefect
    # Save the detection results to a single check result JSON file.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_rule_security_group = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_rule_security_group,
                            result_json_path, True)
    except Exception as e:
        pm_logger.error("[%s] Failed to save the check result JSON file.",
                        awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    return result_security_group

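# The inbound-rule match in execute_security_group_port reduces to the
# predicate below: a rule is flagged when it is open to the world
# (CommonConst.CIDR_IP_NOT_SECURITY, presumably "0.0.0.0/0") and either
# allows all protocols ('-1') or is a TCP rule whose FromPort..ToPort range
# covers the checked port. This standalone restatement, with the constants
# inlined, is for illustration only.
def is_port_open_to_world(ip_permission, port, open_cidr="0.0.0.0/0"):
    if ip_permission['IpProtocol'] != '-1':
        # Non-TCP rules and rules whose port range misses `port` are ignored.
        if ip_permission['IpProtocol'] != 'tcp':
            return False
        if ip_permission.get('FromPort') is None or \
                ip_permission['FromPort'] > port:
            return False
        if ip_permission.get('ToPort') is None or \
                ip_permission['ToPort'] < port:
            return False
    return any(ip_range.get('CidrIp') == open_cidr
               for ip_range in ip_permission['IpRanges'])

# Example: an SSH rule open to the internet is flagged for port 22.
assert is_port_open_to_world(
    {'IpProtocol': 'tcp', 'FromPort': 22, 'ToPort': 22,
     'IpRanges': [{'CidrIp': '0.0.0.0/0'}]}, 22) is True
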
def check_cis_item_4_03(trace_id, check_history_id, organization_id,
                        project_id, awsaccount, session, result_json_path,
                        check_item_code, excluded_resources):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Failed to get the region information.")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            # Get the security group information for each region of the
            # target AWS account (excluding GovCloud and Beijing).
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
                if (len(security_groups) == 0):
                    pm_logger.info(
                        "[%s/%s] No security group information was retrieved.",
                        awsaccount, region_name)
                try:
                    if (len(security_groups) > 0):
                        FileUtils.upload_s3(trace_id, security_groups,
                                            s3_file_name, True)
                except PmError as e:
                    pm_logger.error(
                        "[%s/%s] Failed to save the security group information to S3.",
                        awsaccount, region_name)
                    raise common_utils.write_log_pm_error(e, pm_logger)
            # Get the EC2 instance information for each region of the target
            # AWS account (excluding GovCloud and Beijing).
            s3_file_name_iam_instances = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "IAM_Instances_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(
                    trace_id, "S3_CHECK_BUCKET",
                    s3_file_name_iam_instances)) is True:
                try:
                    reservation_instances = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET",
                        s3_file_name_iam_instances)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    reservation_instances = Ec2Utils.describe_instances(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
                if (len(reservation_instances) == 0):
                    pm_logger.info(
                        "[%s/%s] No EC2 instance information was retrieved.",
                        awsaccount, region_name)
                try:
                    if (len(reservation_instances) > 0):
                        s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                            check_history_id, organization_id, project_id,
                            awsaccount,
                            "VPC_SG_Instances_" + region_name + ".json")
                        FileUtils.upload_s3(trace_id, reservation_instances,
                                            s3_file_name, True)
                except PmError as e:
                    pm_logger.error(
                        "[%s/%s] Failed to save the EC2 instance information to S3.",
                        awsaccount, region_name)
                    raise common_utils.write_log_pm_error(e, pm_logger)
            check1 = []
            check2 = []
            try:
                # From the security group information in the resource
                # information file, detect default security groups that have
                # inbound and outbound rules configured.
                for security_group in security_groups:
                    # Skip excluded resources.
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name,
                            ResourceType.GroupId, resource_name,
                            excluded_resources):
                        continue
                    if (security_group['GroupName'] == CommonConst.DEFAULT
                            and len(security_group['IpPermissions']) > 0
                            and len(security_group['IpPermissionsEgress']) > 0):
                        check1.append(security_group['GroupId'])
                # From the EC2 instance information in the resource
                # information file, detect EC2 instances that have the
                # default security group attached.
                for reservation_instance in reservation_instances:
                    for instance in reservation_instance['Instances']:
                        for security_group in instance['SecurityGroups']:
                            if (security_group['GroupName'] ==
                                    CommonConst.DEFAULT):
                                if common_utils.check_key('Tags',
                                                          instance) is True:
                                    name_tag = next(
                                        filter(
                                            lambda tag: tag['Key'] == 'Name',
                                            instance['Tags']), None)
                                    instance['InstanceName'] = (
                                        None if name_tag is None
                                        else name_tag['Value'])
                                check2.append(instance)
                if (len(check1) > 0 or len(check2) > 0):
                    check_results.append(
                        get_check_cis_item_4_03_result(check1, check2,
                                                       region_name))
            except Exception as e:
                pm_logger.error(
                    "[%s/%s] An error occurred during the check processing.",
                    awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Export the CHECK_CIS12_ITEM_4_03.json file.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_cis_item_4_03 = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_cis_item_4_03, result_json_path,
                            True)
    except Exception as e:
        pm_logger.error("[%s] Failed to save the check result JSON file.",
                        awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Check result.
    if len(check_results) > 0:
        return CheckResult.MinorInadequacies
    return CheckResult.Normal

def execute_check_results_assessment(trace_id, check_item_code,
                                     check_history_id, organization_id,
                                     project_id, aws_account,
                                     result_json_path, level):
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
        check_history_id, organization_id, project_id, aws_account,
        "Assessment_Result.json")
    try:
        # Get the resource information.
        if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                            s3_file_name)) is True:
            try:
                assessment_items = FileUtils.read_json(
                    trace_id, "S3_CHECK_BUCKET", s3_file_name)
            except PmError as e:
                raise common_utils.write_log_pm_error(e, pm_logger)
        else:
            try:
                assessment_items = pm_assessmentItems.query_organization_index_filter_awsaccount(
                    trace_id, organization_id, project_id, aws_account)
            except PmError as e:
                raise common_utils.write_log_pm_error(e, pm_logger)
            # Save the retrieved query result to S3
            # (resource information file).
            try:
                FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                      assessment_items, s3_file_name)
            except PmError as e:
                pm_logger.error(
                    "[%s] Failed to save the resource information file to S3.",
                    aws_account)
        # Check processing.
        try:
            check_results = []
            assessment_result = None
            for assessment_item in assessment_items:
                if (assessment_item['CheckItemCode'] == check_item_code):
                    assessment_result = {
                        'AssessmentComment':
                            common_utils.get_value('AssessmentComment',
                                                   assessment_item),
                        'UserID':
                            common_utils.get_value('UserID', assessment_item),
                        'MailAddress':
                            common_utils.get_value('MailAddress',
                                                   assessment_item),
                        'CreatedAt': assessment_item['CreatedAt']
                    }
                    break
            if (assessment_result is None):
                LEVEL_DIVISION = {
                    "1": CommonConst.LEVEL_CODE_21,  # critical defect
                    "2": CommonConst.LEVEL_CODE_11   # minor inadequacy
                }
                result = {
                    'Region': 'Global',
                    'Level': LEVEL_DIVISION[level],
                    'DetectionItem': {
                        'NoEvaluation': True
                    }
                }
                check_results.append(result)
        except Exception as e:
            pm_logger.error(
                "[%s] An error occurred during the check processing.",
                aws_account)
            raise common_utils.write_log_exception(e, pm_logger)
        # Check result JSON file.
        try:
            current_date = date_utils.get_current_date_by_format(
                date_utils.PATTERN_YYYYMMDDHHMMSS)
            check_result_json = {
                'AWSAccount': aws_account,
                'CheckResults': check_results,
                'DateTime': current_date
            }
            if assessment_result is not None:
                check_result_json['AssessmentResult'] = assessment_result
            FileUtils.upload_json(trace_id, "S3_CHECK_BUCKET",
                                  check_result_json, result_json_path)
        except Exception as e:
            pm_logger.error("[%s] Failed to save the check result JSON file.",
                            aws_account)
            raise common_utils.write_log_exception(e, pm_logger)
    except Exception as e:
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Check result.
    if len(check_results) == 0:
        return CheckResult.Normal
    elif (level == CommonConst.LEVEL_CODE_1):
        return CheckResult.CriticalDefect
    return CheckResult.MinorInadequacies

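# Illustration of the return-value contract of
# execute_check_results_assessment: when no assessment comment exists for the
# check item, a single "NoEvaluation" detection is recorded and the severity
# follows the "level" argument. All argument values below are hypothetical.
result = execute_check_results_assessment(
    trace_id="trace-0001",
    check_item_code="CHECK_CIS12_ITEM_1_01",
    check_history_id="history-0001",
    organization_id="org-0001",
    project_id="prj-0001",
    aws_account="123456789012",
    result_json_path="check_results/CHECK_CIS12_ITEM_1_01.json",
    level=CommonConst.LEVEL_CODE_1)
# No detections -> CheckResult.Normal; detections with level
# CommonConst.LEVEL_CODE_1 -> CheckResult.CriticalDefect; detections with any
# other level -> CheckResult.MinorInadequacies.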