コード例 #1
0
def prepare_md_dump(cache_device, core_device, cls, cache_id):
    """Start a WB cache with one core, dump its metadata to a file, stop the cache.

    The metadata size is parsed from dmesg output and the dump is taken from
    the raw cache device with dd after the cache is configured.

    Returns:
        File object pointing at the metadata dump ("/tmp/test_activate_corrupted.dump").
    """
    with TestRun.step("Setup WB cache instance with one core"):
        cache = casadm.start_cache(
            cache_dev=cache_device,
            cache_mode=CacheMode.WB,
            cache_line_size=cls,
            cache_id=cache_id,
            force=True,
        )
        cache.add_core(core_device)

    with TestRun.step("Get metadata size"):
        dmesg_output = TestRun.executor.run_expect_success("dmesg").stdout
        metadata_size = dmesg.get_metadata_size(dmesg_output)

    with TestRun.step("Dump the metadata of the cache"):
        dump_path = "/tmp/test_activate_corrupted.dump"
        metadata_dump = File(dump_path)
        # Make sure no stale dump from a previous run is left behind.
        metadata_dump.remove(force=True, ignore_errors=True)

        one_mebibyte = Size(1, Unit.MebiByte)
        # Round the block count up so the whole metadata area is covered.
        blocks_to_copy = int(metadata_size / one_mebibyte) + 1
        dd_command = (
            Dd()
            .input(cache_device.path)
            .output(metadata_dump.full_path)
            .block_size(one_mebibyte)
            .count(blocks_to_copy)
        )
        dd_command.run()
        metadata_dump.refresh_item()

    with TestRun.step("Stop cache device"):
        cache.stop()
        return metadata_dump
コード例 #2
0
def test_load_after_clean_shutdown(reboot_type, cache_mode, filesystem):
    """
        title: Planned system shutdown test.
        description: Test for data consistency after clean system shutdown.
        pass_criteria:
          - DUT should reboot successfully.
          - Checksum of file on core device should be the same before and after reboot.
    """
    with TestRun.step("Prepare CAS device."):
        cache_disk = TestRun.disks['cache']
        cache_disk.create_partitions([Size(1, Unit.GibiByte)])
        cache_dev = cache_disk.partitions[0]
        core_dev = TestRun.disks['core']
        cache = casadm.start_cache(cache_dev, cache_mode, force=True)
        core = cache.add_core(core_dev)
        core.create_filesystem(filesystem,
                               blocksize=int(Size(1, Unit.Blocks4096)))
        core.mount(mount_point)

    with TestRun.step("Create file on cache and count its checksum."):
        test_file = File(os.path.join(mount_point, "test_file"))
        # Write 1 MiB of zeroes (1024 blocks of 1 KiB).
        Dd()\
            .input("/dev/zero")\
            .output(test_file.full_path)\
            .block_size(Size(1, Unit.KibiByte))\
            .count(1024)\
            .run()
        test_file.refresh_item()
        test_file_md5 = test_file.md5sum()
        # Flush page cache so the data reaches the CAS device before reboot.
        sync()
        drop_caches(DropCachesMode.ALL)

    with TestRun.step("Reset platform."):
        if reboot_type == "soft":
            TestRun.executor.reboot()
        else:
            # Hard reset via external power-control plugin.
            power_control = TestRun.plugin_manager.get_plugin('power_control')
            power_control.power_cycle()

    with TestRun.step("Load cache."):
        casadm.load_cache(cache_dev)
        core.mount(mount_point)

    with TestRun.step("Check file md5sum."):
        test_file.refresh_item()
        if test_file_md5 != test_file.md5sum():
            # Fixed grammar of the error message ("does" -> "do").
            TestRun.LOGGER.error(
                "Checksums do not match - file is corrupted.")
        else:
            TestRun.LOGGER.info("File checksum is correct.")

    with TestRun.step("Remove test file."):
        test_file.remove()
コード例 #3
0
def _verify_stats_sum_to_zero(stats_target, target_name, ioclass_id_list,
                              not_compare_stats):
    """Check that per-IO-class statistics of *stats_target* sum up to its totals.

    Fetches the aggregate usage/request/block statistics, subtracts every
    IO class' contribution and logs an error for each statistic (except the
    ones listed in *not_compare_stats*) that does not end up at zero.
    """
    stat_filter = [StatsFilter.usage, StatsFilter.req, StatsFilter.blk]
    totals = stats_target.get_statistics_flat(stat_filter=stat_filter)
    for ioclass_id in ioclass_id_list:
        ioclass_stats = stats_target.get_statistics_flat(
            stat_filter=stat_filter,
            io_class_id=ioclass_id,
        )
        for stat_name in totals:
            if stat_name in not_compare_stats:
                continue
            totals[stat_name] -= ioclass_stats[stat_name]

    for stat_name in totals:
        if stat_name in not_compare_stats:
            continue
        stat_val = totals[stat_name]
        if isinstance(stat_val, Size):
            stat_val = stat_val.get_value()
        if stat_val != 0:
            TestRun.LOGGER.error(f"{stat_name} diverged for {target_name}!\n")


def test_ioclass_stats_sum(random_cls):
    """
        title: Test for sum of IO class statistics.
        description: |
          Check if statistics for configured IO classes sum up to cache/core statistics.
        pass_criteria:
          - Per class cache IO class statistics sum up to cache statistics.
          - Per class core IO class statistics sum up to core statistics.
    """

    min_ioclass_id = 1
    max_ioclass_id = 11
    file_size_base = Unit.Blocks4096.value

    with TestRun.step("Test prepare"):
        caches, cores = prepare(random_cls)
        cache, core = caches[0], cores[0]

    with TestRun.step("Prepare IO class config file"):
        ioclass_list = []
        for class_id in range(min_ioclass_id, max_ioclass_id):
            ioclass_list.append(
                IoClass(class_id=class_id,
                        rule=f"file_size:le:{file_size_base * class_id}&done",
                        priority=22))
        IoClass.save_list_to_config_file(ioclass_list, True)

    with TestRun.step("Load IO class config file"):
        cache.load_io_class(ioclass_config.default_config_file_path)

    with TestRun.step(
            "Generate files with particular sizes in temporary folder"):
        files_list = []
        for class_id in range(min_ioclass_id, max_ioclass_id):
            path = f"/tmp/test_file_{file_size_base * class_id}"
            File.create_file(path)
            f = File(path)
            f.padding(Size(file_size_base * class_id, Unit.Byte))
            files_list.append(f)

    with TestRun.step("Copy files to mounted core"):
        core.mount(mountpoint)
        for f in files_list:
            TestRun.LOGGER.info(f"Copying file {f.name} to mounted core")
            f.copy(mountpoint)
            sync()
        # To prevent stats pollution by filesystem requests, umount core device
        # after files are copied
        core.unmount()
        sync()

    with TestRun.step(
            "Check if per class cache IO class statistics sum up to cache statistics"
    ):
        # Name of stats, which should not be compared
        not_compare_stats = ["clean", "occupancy", "free"]
        ioclass_id_list = list(range(min_ioclass_id, max_ioclass_id))
        # Append default IO class id
        ioclass_id_list.append(0)

        _verify_stats_sum_to_zero(cache, "cache", ioclass_id_list,
                                  not_compare_stats)

    with TestRun.step(
            "Check if per class core IO class statistics sum up to core statistics"
    ):
        _verify_stats_sum_to_zero(core, "core", ioclass_id_list,
                                  not_compare_stats)

    # BUGFIX: the cleanup step was indented one level too deep, which nested it
    # inside the previous step instead of running as a top-level test step.
    with TestRun.step("Test cleanup"):
        for f in files_list:
            f.remove()
コード例 #4
0
def test_ioclass_stats_sum(prepare_and_cleanup):
    """Check if stats for all set ioclasses sum up to cache stats"""
    cache, core = prepare()
    min_ioclass_id = 1
    max_ioclass_id = 11
    file_size_base = Unit.KibiByte.value * 4

    TestProperties.LOGGER.info("Preparing ioclass config file")
    ioclass_config.create_ioclass_config(
        add_default_rule=True, ioclass_config_path=ioclass_config_path
    )
    for class_id in range(min_ioclass_id, max_ioclass_id):
        ioclass_config.add_ioclass(
            ioclass_id=class_id,
            eviction_priority=22,
            allocation=True,
            rule=f"file_size:le:{file_size_base * class_id}&done",
            ioclass_config_path=ioclass_config_path,
        )
    cache.load_io_class(ioclass_config_path)

    TestProperties.LOGGER.info("Generating files with particular sizes")
    test_files = []
    for class_id in range(min_ioclass_id, max_ioclass_id):
        file_path = f"/tmp/test_file_{file_size_base * class_id}"
        File.create_file(file_path)
        created_file = File(file_path)
        created_file.padding(Size(file_size_base * class_id, Unit.Byte))
        test_files.append(created_file)

    core.create_filesystem(Filesystem.ext4)

    cache.reset_counters()

    # Statistics that are not expected to sum up and are skipped.
    excluded_stats = ["clean", "occupancy"]
    # Checked IO class ids: all configured classes plus 0 (the default class).
    checked_ioclass_ids = list(range(min_ioclass_id, max_ioclass_id))
    checked_ioclass_ids.append(0)
    TestProperties.LOGGER.info("Copying files to mounted core and stats check")
    stat_filter = [StatsFilter.usage, StatsFilter.req, StatsFilter.blk]
    for test_file in test_files:
        # Unmount the core right after each copy so filesystem requests
        # do not pollute the statistics being compared.
        core.mount(mountpoint)
        test_file.copy(mountpoint)
        sync()
        core.unmount()
        sync()

        aggregate_stats = cache.get_cache_statistics(stat_filter=stat_filter)
        for ioclass_id in checked_ioclass_ids:
            per_class_stats = cache.get_cache_statistics(
                stat_filter=stat_filter,
                io_class_id=ioclass_id,
            )
            for stat_name in aggregate_stats:
                if stat_name in excluded_stats:
                    continue
                aggregate_stats[stat_name] -= per_class_stats[stat_name]

        # After subtracting every class' contribution, each compared
        # statistic must be exactly zero.
        for stat_name in aggregate_stats:
            if stat_name in excluded_stats:
                continue
            leftover = aggregate_stats[stat_name]
            if isinstance(leftover, Size):
                leftover = leftover.get_value()
            assert leftover == 0, f"{stat_name} diverged!\n"

    # Test cleanup
    for test_file in test_files:
        test_file.remove()