def pass_criteria():
    """Build the pass-criteria mapping used by the statistics core.

    Looks up the average-latency baseline for the current host
    architecture and kernel, and requires the measured "Avg" to equal it.
    """
    arch = platform.machine()
    host_kernel = get_kernel_version(level=1)
    baseline = LATENCY_AVG_BASELINES[arch][host_kernel]
    return {"Avg": criteria.EqualWith(baseline)}
def __init__(self, request):
    """Initialize the instance.

    Derives the results-file path from the pytest request's original
    test name and the host kernel version, creating the results root
    directory if it is missing.
    """
    self._results_file = None
    test_name = request.node.originalname
    self._root_path = defs.TEST_RESULTS_DIR
    # Create the root directory, if it doesn't exist.
    self._root_path.mkdir(exist_ok=True)
    kernel_tag = utils.get_kernel_version(level=1)
    results_name = "{}_results_{}.json".format(test_name, kernel_tag)
    self._results_file = os.path.join(self._root_path, results_name)
def snapshot_resume_measurements(vm_type):
    """Define measurements for snapshot resume tests.

    Starts from the per-arch/per-vm-type load-latency baseline and bumps
    the target to account for host-side sources of extra latency.
    """
    load_latency = LOAD_LATENCY_BASELINES[platform.machine()][vm_type]

    if is_io_uring_supported():
        # The io_uring syscalls used by the block device add latency.
        load_latency["target"] += 115

    # Host kernels >= 5.4 add an up to ~30ms latency.
    # See: https://github.com/firecracker-microvm/firecracker/issues/2129
    # NOTE(review): the strict '>' excludes exactly 5.4.0 while the note
    # above says ">= 5.4" — confirm the intended boundary.
    if compare_versions(get_kernel_version(), "5.4.0") > 0:
        load_latency["target"] += 30

    latency = types.MeasurementDef.create_measurement(
        "latency",
        "ms",
        [function.Max("max")],
        {"max": criteria.LowerThan(load_latency)})
    return [latency]
from framework.builder import MicrovmBuilder from framework.matrix import TestContext, TestMatrix from framework.stats import core from framework.stats.baseline import Provider as BaselineProvider from framework.stats.metadata import DictProvider as DictMetadataProvider from framework.utils import get_cpu_percent, get_kernel_version, \ is_io_uring_supported, CmdBuilder, DictQuery, run_cmd from framework.utils_cpuid import get_cpu_model_name, get_instance_type import host_tools.drive as drive_tools import host_tools.network as net_tools # pylint: disable=import-error import framework.stats as st from integration_tests.performance.configs import defs from integration_tests.performance.utils import handle_failure TEST_ID = "block_performance" kernel_version = get_kernel_version(level=1) CONFIG_NAME_REL = "test_{}_config_{}.json".format(TEST_ID, kernel_version) CONFIG_NAME_ABS = os.path.join(defs.CFG_LOCATION, CONFIG_NAME_REL) CONFIG = json.load(open(CONFIG_NAME_ABS, encoding='utf-8')) DEBUG = False FIO = "fio" # Measurements tags. CPU_UTILIZATION_VMM = "cpu_utilization_vmm" CPU_UTILIZATION_VMM_SAMPLES_TAG = "cpu_utilization_vmm_samples" CPU_UTILIZATION_VCPUS_TOTAL = "cpu_utilization_vcpus_total" # pylint: disable=R0903 class BlockBaselinesProvider(BaselineProvider):
from framework.builder import MicrovmBuilder, SnapshotBuilder, SnapshotType from framework.matrix import TestContext, TestMatrix from framework.stats import core from framework.stats.baseline import Provider as BaselineProvider from framework.stats.metadata import DictProvider as DictMetadataProvider from framework.utils import get_kernel_version, DictQuery from framework.utils_cpuid import get_cpu_model_name, get_instance_type import host_tools.drive as drive_tools import host_tools.network as net_tools # pylint: disable=import-error import framework.stats as st from integration_tests.performance.configs import defs from integration_tests.performance.utils import handle_failure TEST_ID = "snap_restore_performance" CONFIG_NAME_REL = "test_{}_config_{}.json".format( TEST_ID, get_kernel_version(level=1)) CONFIG_NAME_ABS = os.path.join(defs.CFG_LOCATION, CONFIG_NAME_REL) CONFIG_DICT = json.load(open(CONFIG_NAME_ABS, encoding='utf-8')) DEBUG = False BASE_VCPU_COUNT = 1 BASE_MEM_SIZE_MIB = 128 BASE_NET_COUNT = 1 BASE_BLOCK_COUNT = 1 USEC_IN_MSEC = 1000 # Measurements tags. RESTORE_LATENCY = "restore_latency" # Define 4 net device configurations. net_ifaces = [NetIfaceConfig(),