import pytest

from api.cas.cache_config import CacheMode, CacheLineSize, CacheModeTrait
from api.cas.casadm import OutputFormat, print_statistics, start_cache
from core.test_run import TestRun
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools.dd import Dd
from test_tools.disk_utils import Filesystem
from test_utils.size import Size, Unit

iterations = 64
cache_size = Size(8, Unit.GibiByte)


@pytest.mark.parametrizex("cache_line_size", CacheLineSize)
@pytest.mark.parametrizex("cache_mode", CacheMode.with_any_trait(
    CacheModeTrait.InsertRead | CacheModeTrait.InsertWrite))
@pytest.mark.parametrizex("test_object", ["cache", "core"])
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_output_consistency(cache_line_size, cache_mode, test_object):
    """
        title: Test consistency between different output formats of cache and core statistics.
        description: |
          Check if Open CAS statistics for cache and core are consistent
          regardless of the output format.
        pass_criteria:
          - Statistics in CSV format match statistics in table format.
    """
    with TestRun.step("Prepare cache and core."):
        cache_dev = TestRun.disks['cache']
        cache_dev.create_partitions([cache_size])
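
# --- Illustration only; not part of the original test -------------------------
# A minimal sketch of how the pass criterion "statistics in CSV format match
# statistics in table format" could be checked once both outputs are captured.
# The layouts assumed below (a header row plus a value row for CSV,
# "name   value" rows for the table) are illustrative assumptions, not the
# actual casadm output format.
import csv
import io
import re


def csv_stats_to_dict(csv_text):
    # Assume the CSV dump is a header row followed by a single value row.
    rows = list(csv.reader(io.StringIO(csv_text)))
    return dict(zip(rows[0], rows[1]))


def table_stats_to_dict(table_text):
    # Assume each table row is "<statistic name>  <value>", separated by at
    # least two spaces.
    stats = {}
    for line in table_text.splitlines():
        match = re.match(r"\s*(.+?)\s{2,}(\S.*?)\s*$", line)
        if match:
            stats[match.group(1).strip()] = match.group(2)
    return stats


def outputs_match(csv_text, table_text):
    csv_stats = csv_stats_to_dict(csv_text)
    table_stats = table_stats_to_dict(table_text)
    # Every statistic present in both outputs must report the same value.
    common = csv_stats.keys() & table_stats.keys()
    return bool(common) and all(csv_stats[k] == table_stats[k] for k in common)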

Example #2

import pytest

# NOTE: the snippet omits the import header; module paths below follow Example #1.
from api.cas.cache_config import (
    CacheMode,
    CacheModeTrait,
    CleaningPolicy,
    FlushParametersAcp,
    CacheLineSize,
    Time,
)
from core.test_run import TestRun
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools.blktrace import BlkTrace, BlkTraceMask, ActionKind, RwbsKind


@pytest.mark.parametrizex(
    "cache_line_size",
    [CacheLineSize.LINE_4KiB, CacheLineSize.LINE_16KiB, CacheLineSize.LINE_64KiB],
)
@pytest.mark.parametrizex(
    "cache_mode", CacheMode.with_any_trait(CacheModeTrait.LazyWrites)
)
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_acp_param_flush_max_buffers(cache_line_size, cache_mode):
    """
        title: Functional test for ACP flush-max-buffers parameter.
        description: |
          Verify that there is an appropriate number of I/O requests between wake-up time intervals,
          which depends on the flush-max-buffers parameter.
        pass_criteria:
          - ACP triggered dirty data flush
          - Number of writes to core is lower than or equal to flush_max_buffers
    """
    with TestRun.step("Test prepare."):
        buffer_values = get_random_list(
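
# --- Illustration only; not part of the original test -------------------------
# A minimal sketch of the pass criterion "number of writes to the core device
# between wake-up time intervals is lower than or equal to flush_max_buffers".
# Write timestamps would come from a blktrace of the core device; bucketing
# them by a fixed wake-up interval is a simplification for illustration.
from collections import Counter


def writes_per_wakeup_window(write_timestamps_ns, wakeup_time_ns):
    # Count write completions falling into each wake-up window.
    return Counter(int(ts // wakeup_time_ns) for ts in write_timestamps_ns)


def flush_max_buffers_respected(write_timestamps_ns, wakeup_time_ns, flush_max_buffers):
    windows = writes_per_wakeup_window(write_timestamps_ns, wakeup_time_ns)
    # ACP must have flushed something, and no window may exceed the limit.
    return bool(windows) and max(windows.values()) <= flush_max_buffers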

Example #3

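        # Tail of a preceding test's write-order check; the snippet starts mid-file.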
            write_counter += 1
        TestRun.LOGGER.info(f"Writes to chunk: {write_counter}")

        if all_writes_ok:
            TestRun.LOGGER.info("All sectors written in proper order.")


@pytest.mark.parametrizex(
    "cache_line_size",
    [
        CacheLineSize.LINE_4KiB, CacheLineSize.LINE_16KiB,
        CacheLineSize.LINE_64KiB
    ],
)
@pytest.mark.parametrizex("cache_mode",
                          CacheMode.with_any_trait(CacheModeTrait.LazyWrites))
@pytest.mark.require_disk("cache",
                          DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeSet([DiskType.hdd, DiskType.hdd4k]))
def test_acp_param_flush_max_buffers(cache_line_size, cache_mode):
    """
        title: Functional test for ACP flush-max-buffers parameter.
        description: |
          Verify that there is an appropriate number of I/O requests between wake-up time intervals,
          which depends on the flush-max-buffers parameter.
        pass_criteria:
          - ACP triggered dirty data flush
          - Number of writes to core is lower than or equal to flush_max_buffers
    """
    with TestRun.step("Test prepare."):
        buffer_values = get_random_list(

Example #4

from api.cas.cache_config import CacheMode, CacheLineSize, CacheModeTrait
from api.cas.casadm import OutputFormat, print_statistics, start_cache
from core.test_run import TestRun
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools.dd import Dd
from test_tools.disk_utils import Filesystem
from test_utils.size import Size, Unit

iterations = 64
cache_size = Size(8, Unit.GibiByte)


@pytest.mark.parametrizex("cache_line_size", CacheLineSize)
@pytest.mark.parametrizex(
    "cache_mode",
    CacheMode.with_any_trait(CacheModeTrait.InsertRead
                             | CacheModeTrait.InsertWrite))
@pytest.mark.parametrizex("test_object", ["cache", "core"])
@pytest.mark.require_disk("cache",
                          DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_output_consistency(cache_line_size, cache_mode, test_object):
    """
        title: Test consistency between different output formats of cache and core statistics.
        description: |
          Check if Open CAS statistics for cache and core are consistent
          regardless of the output format.
        pass_criteria:
          - Statistics in CSV format match statistics in table format.
    """
    with TestRun.step("Prepare cache and core."):
        cache_dev = TestRun.disks['cache']