def test_describe_instance_error(self):
    """A ClientError from EC2 must surface as PmError and be logged."""
    # Arrange: mocked EC2 client whose describe_instances raises ClientError.
    client = ec2_utils_mock.client_connect()
    error_response = {
        'Error': {
            'Code': data_client_error['service_error_code'],
            'Message': data_client_error['service_message']
        }
    }
    with patch.object(PmLogAdapter, 'error',
                      return_value=None) as log_error_mock:
        with patch.object(client, 'describe_instances') as describe_mock:
            describe_mock.side_effect = ClientError(error_response,
                                                    'EXCEPTION')
            # Act: the utility is expected to wrap the AWS failure.
            with self.assertRaises(PmError) as raised:
                ec2_utils.describe_instances(trace_id, aws_account, client,
                                             region_name)
            # Assert: the original AWS error details are preserved.
            cause = raised.exception.cause_error.response['Error']
            self.assertEqual(data_client_error['service_error_code'],
                             cause['Code'])
            self.assertEqual(data_client_error['service_message'],
                             cause['Message'])
            # Assert: the failure was logged for this account/region.
            log_error_mock.assert_any_call(
                "[%s/%s] EC2インスタンス情報の取得に失敗しました。", aws_account,
                region_name)
def test_describe_instances_not_exist(self):
    """When the region holds no instances, an empty list is returned."""
    client = ec2_utils_mock.client_connect()
    # Act: query a freshly mocked client with no instances created.
    result = ec2_utils.describe_instances(trace_id, aws_account, client,
                                          region_name)
    # Assert: nothing to report.
    self.assertListEqual([], result)
def test_describe_instances_exist_next_token(self):
    """Paged responses (NextToken) are followed and concatenated."""
    client = ec2_utils_mock.client_connect()
    with patch.object(client, 'describe_instances') as describe_mock:
        # The side-effect helper serves successive pages carrying NextToken.
        describe_mock.side_effect = (
            ec2_utils_mock.side_effect_describe_instances)
        # Act: the utility should keep calling until the token is exhausted.
        result = ec2_utils.describe_instances(trace_id, aws_account, client,
                                              region_name)
    # Assert: every page's reservations were collected, in order.
    expected = copy.deepcopy(
        DataTestEC2.DATA_CHECK_ALL_DESCRIBE_INSTANCES['Reservations'])
    self.assertListEqual(expected, result)
def test_describe_instances_success(self):
    """Happy path: the reservations reported by EC2 are returned as-is."""
    resource = ec2_utils_mock.resource_connect()
    client = ec2_utils_mock.client_connect()
    # Seed the mocked region with a single instance.
    resource.create_instances(ImageId='', MinCount=1, MaxCount=1)
    expected = client.describe_instances()
    # Act.
    result = ec2_utils.describe_instances(trace_id, aws_account, client,
                                          region_name)
    # Assert: output matches the raw client response's Reservations list.
    self.assertListEqual(expected['Reservations'], result)
def check_cis_item_4_03(trace_id, check_history_id, organization_id,
                        project_id, awsaccount, session, result_json_path,
                        check_item_code, excluded_resources):
    """CIS check 4.03: flag used "default" security groups.

    For every region (except those in REGION_IGNORE) this collects
    security groups and EC2 instances — preferring previously captured
    raw JSON files on S3 over live API calls — then detects:
      * check1: "default" security groups that have both inbound and
        outbound rules configured;
      * check2: EC2 instances that have a "default" security group
        attached.
    Results are written to ``result_json_path`` on S3.

    :param trace_id: correlation id for logging.
    :param check_history_id: id used to build the S3 raw-file paths.
    :param organization_id: id used to build the S3 raw-file paths.
    :param project_id: id used to build the S3 raw-file paths.
    :param awsaccount: AWS account under check.
    :param session: boto3 session used to create regional EC2 clients.
    :param result_json_path: S3 key for the result JSON file.
    :param check_item_code: check code attached to error notifications.
    :param excluded_resources: exclusion list consulted for security
        groups (NOTE(review): instances in check2 are NOT filtered
        against this list — confirm that is intended).
    :return: CheckResult.MinorInadequacies if any finding exists,
        otherwise CheckResult.Normal.
    :raises PmError: on region lookup, per-region processing, or result
        upload failure (with pm_notification_error populated).
    """
    pm_logger = common_utils.begin_logger(trace_id, __name__,
                                          inspect.currentframe())
    check_results = []
    try:
        regions = aws_common.get_regions(trace_id, session)
    except PmError as e:
        pm_logger.error("Regionの情報の取得に失敗しました。")
        e.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(e, pm_logger)
    for region in regions:
        region_name = region["RegionName"]
        try:
            if region_name in REGION_IGNORE:
                continue
            ec2_client = Ec2Utils.get_ec2_client(trace_id, session,
                                                 region_name, awsaccount)
            # Fetch security group info per region of the target AWS
            # account (excluding GovCloud and Beijing). A raw file
            # already on S3 takes precedence over a live API call.
            s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "VPC_SecurityGroups_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(trace_id, "S3_CHECK_BUCKET",
                                                s3_file_name)) is True:
                try:
                    security_groups = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET", s3_file_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    security_groups = Ec2Utils.describe_security_groups(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            if (len(security_groups) == 0):
                pm_logger.info("[%s/%s] セキュリティグループ情報の取得件数が0でした。",
                               awsaccount, region_name)
            # Persist the freshly fetched (non-empty) raw data to S3.
            try:
                if (len(security_groups) > 0):
                    FileUtils.upload_s3(trace_id, security_groups,
                                        s3_file_name, True)
            except PmError as e:
                pm_logger.error("[%s/%s] セキュリティグループ情報のS3保存に失敗しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
            # Fetch EC2 instance info per region of the target AWS
            # account (excluding GovCloud and Beijing), again preferring
            # a cached raw file on S3.
            s3_file_name_iam_instances = CommonConst.PATH_CHECK_RAW.format(
                check_history_id, organization_id, project_id, awsaccount,
                "IAM_Instances_" + region_name + ".json")
            if (aws_common.check_exists_file_s3(
                    trace_id, "S3_CHECK_BUCKET",
                    s3_file_name_iam_instances)) is True:
                try:
                    reservation_instances = FileUtils.read_json(
                        trace_id, "S3_CHECK_BUCKET",
                        s3_file_name_iam_instances)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            else:
                try:
                    reservation_instances = Ec2Utils.describe_instances(
                        trace_id, awsaccount, ec2_client, region_name)
                except PmError as e:
                    raise common_utils.write_log_pm_error(e, pm_logger)
            if (len(reservation_instances) == 0):
                pm_logger.info("[%s/%s] EC2インスタンス情報の取得件数が0でした。",
                               awsaccount, region_name)
            # NOTE(review): instance data is uploaded under a
            # "VPC_SG_Instances_" key, distinct from the
            # "IAM_Instances_" key read above — confirm intentional.
            try:
                if (len(reservation_instances) > 0):
                    s3_file_name = CommonConst.PATH_CHECK_RAW.format(
                        check_history_id, organization_id, project_id,
                        awsaccount,
                        "VPC_SG_Instances_" + region_name + ".json")
                    FileUtils.upload_s3(trace_id, reservation_instances,
                                        s3_file_name, True)
            except PmError as e:
                pm_logger.error("[%s/%s] EC2インスタンス情報のS3保存に失敗しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
            check1 = []
            check2 = []
            try:
                # From the resource file's security group info, detect
                # "default" security groups that have both inbound and
                # outbound rules configured.
                for security_group in security_groups:
                    # Skip resources explicitly excluded from this check.
                    resource_name = security_group['GroupId']
                    if common_utils.check_excluded_resources(
                            check_item_code, region_name,
                            ResourceType.GroupId, resource_name,
                            excluded_resources):
                        continue
                    if (security_group['GroupName'] == CommonConst.DEFAULT
                            and len(security_group['IpPermissions']) > 0
                            and len(
                                security_group['IpPermissionsEgress']) > 0):
                        check1.append(security_group['GroupId'])
                # From the resource file's EC2 instance info, detect
                # instances with a "default" security group attached.
                for reservation_instance in reservation_instances:
                    for instance in reservation_instance['Instances']:
                        for security_group in instance['SecurityGroups']:
                            if security_group[
                                    'GroupName'] == CommonConst.DEFAULT:
                                # Resolve a display name from the "Name"
                                # tag when tags exist; None otherwise.
                                if common_utils.check_key(
                                        'Tags', instance) is True:
                                    name_tag = next(
                                        filter(
                                            lambda tag: tag['Key'] == 'Name',
                                            instance['Tags']), None)
                                    instance[
                                        'InstanceName'] = None if name_tag is None else name_tag[
                                            'Value']
                                # NOTE(review): no break here — an
                                # instance with several "default" SG
                                # entries would be appended repeatedly.
                                check2.append(instance)
                if (len(check1) > 0 or len(check2) > 0):
                    check_results.append(
                        get_check_cis_item_4_03_result(
                            check1, check2, region_name))
            except Exception as e:
                pm_logger.error("[%s/%s] チェック処理中にエラーが発生しました。",
                                awsaccount, region_name)
                raise common_utils.write_log_pm_error(e, pm_logger)
        except Exception as e:
            # Any per-region failure aborts the whole check with a
            # notification carrying the failing region.
            pm_error = common_utils.write_log_exception(e, pm_logger)
            pm_error.pm_notification_error = PmNotificationError(
                check_item_code=check_item_code,
                region=region_name,
                code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
            raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Export file CHECK_CIS12_ITEM_4_03.json with the collected results.
    try:
        current_date = date_utils.get_current_date_by_format(
            date_utils.PATTERN_YYYYMMDDHHMMSS)
        check_cis_item_4_03 = {
            'AWSAccount': awsaccount,
            'CheckResults': check_results,
            'DateTime': current_date
        }
        FileUtils.upload_s3(trace_id, check_cis_item_4_03, result_json_path,
                            True)
    except Exception as e:
        pm_logger.error("[%s] チェック結果JSONファイルの保存に失敗しました。", awsaccount)
        pm_error = common_utils.write_log_exception(e, pm_logger)
        pm_error.pm_notification_error = PmNotificationError(
            check_item_code=check_item_code,
            code_error=CommonConst.KEY_CODE_ERROR_DEFAULT)
        raise common_utils.write_log_pm_error(pm_error, pm_logger)
    # Final verdict: any finding means minor inadequacies.
    if len(check_results) > 0:
        return CheckResult.MinorInadequacies
    return CheckResult.Normal