Example no. 1
0
def measure_pvc_creation_time(interface, pvc_objs, start_time):
    """
    Measure and log the creation time of every PVC in ``pvc_objs``.

    Args:
        interface (str) : an interface (RBD or CephFS) to run on.
        pvc_objs (list) : List of PVC objects for which we have to measure the time.
        start_time (str) : Formatted time from which and on to search the relevant logs.

    Logs:
        PVC Creation Time of all the PVCs.

    """
    # Threshold (in seconds) above which a PVC creation is reported as an error.
    accepted_creation_time = 1
    for pobj in pvc_objs:
        elapsed = performance_lib.measure_pvc_creation_time(
            interface, pobj.name, start_time)

        if elapsed > accepted_creation_time:
            log.error(
                f"PVC {pobj.name} creation time is {elapsed} and is greater than "
                f"{accepted_creation_time} seconds.")
        else:
            log.info(
                f"PVC {pobj.name} was created in {elapsed} seconds.")
Example no. 2
0
    def create_clone(self, clone_num):
        """
        Create one clone of the test PVC and measure how long the creation took.

        Args:
            clone_num (int) the number of clones to create

        Returns:
            str: The created clone name
            int: the creation time of the clone (in secs.)
            int: the csi creation time of the clone (in secs.)

        """
        # Take the CSI-side timestamp first so both measurements share the
        # same starting point.
        csi_start_time = self.get_time("csi")

        new_clone = pvc.create_pvc_clone(
            sc_name=self.pvc_obj.backed_sc,
            parent_pvc=self.pvc_obj.name,
            pvc_name=f"pvc-clone-pas-test-{clone_num}",
            clone_yaml=Interfaces_info[self.interface]["clone_yaml"],
            namespace=self.namespace,
            storage_size=f"{self.pvc_obj.size}Gi",
        )

        # Block until the clone reaches the Bound state (up to 10 minutes).
        helpers.wait_for_resource_state(new_clone, constants.STATUS_BOUND, 600)
        new_clone.reload()

        clone_name = new_clone.name
        total_time = performance_lib.measure_pvc_creation_time(
            self.interface, clone_name, csi_start_time)
        csi_time = performance_lib.csi_pvc_time_measure(
            self.interface, new_clone, "create", csi_start_time)

        # The object is no longer needed; only its name and timings are returned.
        del new_clone
        return (clone_name, total_time, csi_time)
    def create_clone(self, clone_num, clone_yaml):
        """
        Create one clone of the PVC via ``oc create`` and measure its creation time.

        Args:
            clone_num (int) the number of clones to create
            clone_yaml : a template of clone yaml

        Returns:
            int: the creation time of the clone (in secs.)

        """
        log.info(
            f"Creating clone number {clone_num} for interface {self.interface}"
        )

        # Build a unique clone name from the parent PVC name's trailing suffix.
        suffix = self.params["pvcname"].split("-")[-1]
        clone_name = f"pvc-clone-{clone_num}-{suffix}"
        clone_yaml["metadata"]["name"] = clone_name

        tmpfile = self.params["tmpfile"]
        log.info(f"Going to create {tmpfile}")
        with open(tmpfile, "w") as yaml_fd:
            yaml.dump(clone_yaml, yaml_fd, default_flow_style=False)

        # Record the timestamp just before issuing the create command.
        start_time = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
        log.info(f"Clone yaml file is {clone_yaml}")

        res = performance_lib.run_oc_command(f"create -f {tmpfile}",
                                             self.params["nspace"])
        if self.params["ERRMSG"] in res[0]:
            raise Exception(f"Can not create clone : {res}")

        # wait until clone is ready
        self.wait_for_clone_creation(clone_name)

        create_time = performance_lib.measure_pvc_creation_time(
            self.interface, clone_name, start_time)
        log.info(f"Creation time of clone {clone_name} is {create_time} secs.")
        return create_time
    def test_pvc_creation_deletion_measurement_performance(
            self, teardown_factory, pvc_size):
        """
        Measuring PVC creation and deletion times for pvc samples
        Verifying that those times are within required limits

        Args:
            teardown_factory: fixture used to register each PVC for cleanup.
            pvc_size: size of each sampled PVC (parametrized).

        Raises:
            ex.PerformanceException: when a single creation or deletion takes
                longer than the accepted threshold.
        """

        # Getting the full path for the test logs
        self.full_log_path = get_full_test_logs_path(cname=self)
        if self.interface == constants.CEPHBLOCKPOOL:
            self.sc = "RBD"
        if self.interface == constants.CEPHFILESYSTEM:
            self.sc = "CephFS"
        self.full_log_path += f"-{self.sc}-{pvc_size}"
        log.info(f"Logs file path name is : {self.full_log_path}")

        self.start_time = time.strftime("%Y-%m-%dT%H:%M:%SGMT", time.gmtime())

        self.get_env_info()

        # Initialize the results doc file.
        self.full_results = self.init_full_results(
            ResultsAnalyse(self.uuid, self.crd_data, self.full_log_path))
        self.full_results.add_key("pvc_size", pvc_size)
        num_of_samples = 5
        accepted_creation_time = 1
        # CephFS deletion is allowed up to 2 seconds; RBD only 1.
        accepted_deletion_time = 2 if self.interface == constants.CEPHFILESYSTEM else 1
        self.full_results.add_key("samples", num_of_samples)

        accepted_creation_deviation_percent = 50
        accepted_deletion_deviation_percent = 50

        creation_time_measures = []
        deletion_time_measures = []
        msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."

        for i in range(num_of_samples):
            # Use the module-level `log` logger consistently throughout the
            # method (this block previously mixed `logging.info` and `log.info`).
            log.info(f"{msg_prefix} Start creating PVC number {i + 1}.")
            start_time = datetime.datetime.utcnow().strftime(
                "%Y-%m-%dT%H:%M:%SZ")
            pvc_obj = helpers.create_pvc(sc_name=self.sc_obj.name,
                                         size=pvc_size)
            helpers.wait_for_resource_state(pvc_obj, constants.STATUS_BOUND)
            pvc_obj.reload()

            creation_time = performance_lib.measure_pvc_creation_time(
                self.interface, pvc_obj.name, start_time)

            log.info(
                f"{msg_prefix} PVC number {i + 1} was created in {creation_time} seconds."
            )
            if creation_time > accepted_creation_time:
                raise ex.PerformanceException(
                    f"{msg_prefix} PVC creation time is {creation_time} and is greater than "
                    f"{accepted_creation_time} seconds.")
            creation_time_measures.append(creation_time)

            # Remember PV name and reclaim policy before the PVC goes away.
            pv_name = pvc_obj.backed_pv
            pvc_reclaim_policy = pvc_obj.reclaim_policy

            # Write data to the PVC so deletion is measured on a non-empty volume.
            pod_obj = self.write_file_on_pvc(pvc_obj)
            pod_obj.delete(wait=True)
            teardown_factory(pvc_obj)
            log.info(f"{msg_prefix} Start deleting PVC number {i + 1}")
            if pvc_reclaim_policy == constants.RECLAIM_POLICY_DELETE:
                pvc_obj.delete()
                pvc_obj.ocp.wait_for_delete(pvc_obj.name)
                helpers.validate_pv_delete(pvc_obj.backed_pv)
                deletion_time = helpers.measure_pvc_deletion_time(
                    self.interface, pv_name)
                log.info(
                    f"{msg_prefix} PVC number {i + 1} was deleted in {deletion_time} seconds."
                )
                if deletion_time > accepted_deletion_time:
                    raise ex.PerformanceException(
                        f"{msg_prefix} PVC deletion time is {deletion_time} and is greater than "
                        f"{accepted_deletion_time} seconds.")
                deletion_time_measures.append(deletion_time)
            else:
                log.info(
                    f"Reclaim policy of the PVC {pvc_obj.name} is not Delete;"
                    f" therefore not measuring deletion time for this PVC.")

        creation_average = self.process_time_measurements(
            "creation",
            creation_time_measures,
            accepted_creation_deviation_percent,
            msg_prefix,
        )
        self.full_results.add_key("creation-time", creation_average)
        deletion_average = self.process_time_measurements(
            "deletion",
            deletion_time_measures,
            accepted_deletion_deviation_percent,
            msg_prefix,
        )
        self.full_results.add_key("deletion-time", deletion_average)
        self.full_results.all_results["creation"] = creation_time_measures
        self.full_results.all_results["deletion"] = deletion_time_measures
        self.end_time = time.strftime("%Y-%m-%dT%H:%M:%SGMT", time.gmtime())
        self.full_results.add_key("test_time", {
            "start": self.start_time,
            "end": self.end_time
        })
        self.full_results.es_write()
        log.info(
            f"The Result can be found at : {self.full_results.results_link()}")

        if not self.dev_mode:
            # all the results are OK, the test passes, push the results to the codespeed
            push_to_pvc_time_dashboard(self.interface, "1-pvc-creation",
                                       creation_average)
            push_to_pvc_time_dashboard(self.interface, "1-pvc-deletion",
                                       deletion_average)
Example no. 5
0
    def test_pvc_creation_deletion_measurement_performance(
        self, teardown_factory, pvc_size
    ):
        """
        Measuring PVC creation and deletion times for pvc samples
        Verifying that those times are within required limits
        """

        num_of_samples = 5
        accepted_creation_time = 1
        # CephFS deletions are allowed twice the time of RBD deletions.
        if self.interface == constants.CEPHFILESYSTEM:
            accepted_deletion_time = 2
        else:
            accepted_deletion_time = 1

        accepted_creation_deviation_percent = 50
        accepted_deletion_deviation_percent = 50

        creation_time_measures = []
        deletion_time_measures = []
        msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."

        # Sample numbering is 1-based in all log messages.
        for sample in range(1, num_of_samples + 1):
            logging.info(f"{msg_prefix} Start creating PVC number {sample}.")
            start_time = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
            claim = helpers.create_pvc(sc_name=self.sc_obj.name, size=pvc_size)
            helpers.wait_for_resource_state(claim, constants.STATUS_BOUND)
            claim.reload()

            creation_time = performance_lib.measure_pvc_creation_time(
                self.interface, claim.name, start_time
            )
            logging.info(
                f"{msg_prefix} PVC number {sample} was created in {creation_time} seconds."
            )
            if creation_time > accepted_creation_time:
                raise ex.PerformanceException(
                    f"{msg_prefix} PVC creation time is {creation_time} and is greater than "
                    f"{accepted_creation_time} seconds."
                )
            creation_time_measures.append(creation_time)

            # Keep the PV name and reclaim policy before deleting the PVC.
            backing_pv = claim.backed_pv
            reclaim_policy = claim.reclaim_policy

            # Write data so the deletion is measured on a non-empty PVC.
            pod_obj = self.write_file_on_pvc(claim)
            pod_obj.delete(wait=True)
            teardown_factory(claim)
            logging.info(f"{msg_prefix} Start deleting PVC number {sample}")
            if reclaim_policy == constants.RECLAIM_POLICY_DELETE:
                claim.delete()
                claim.ocp.wait_for_delete(claim.name)
                helpers.validate_pv_delete(claim.backed_pv)
                deletion_time = helpers.measure_pvc_deletion_time(
                    self.interface, backing_pv
                )
                logging.info(
                    f"{msg_prefix} PVC number {sample} was deleted in {deletion_time} seconds."
                )
                if deletion_time > accepted_deletion_time:
                    raise ex.PerformanceException(
                        f"{msg_prefix} PVC deletion time is {deletion_time} and is greater than "
                        f"{accepted_deletion_time} seconds."
                    )
                deletion_time_measures.append(deletion_time)
            else:
                logging.info(
                    f"Reclaim policy of the PVC {claim.name} is not Delete;"
                    f" therefore not measuring deletion time for this PVC."
                )

        creation_average = self.process_time_measurements(
            "creation",
            creation_time_measures,
            accepted_creation_deviation_percent,
            msg_prefix,
        )
        deletion_average = self.process_time_measurements(
            "deletion",
            deletion_time_measures,
            accepted_deletion_deviation_percent,
            msg_prefix,
        )

        # all the results are OK, the test passes, push the results to the codespeed
        push_to_pvc_time_dashboard(self.interface, "1-pvc-creation", creation_average)
        push_to_pvc_time_dashboard(self.interface, "1-pvc-deletion", deletion_average)
    def test_pvc_snapshot_performance(self, pvc_size):
        """
        Measure PVC snapshot creation and restore performance.

        1. Run I/O on a pod file
        2. Calculate md5sum of the file
        3. Take a snapshot of the PVC and measure the total snapshot creation
           time and the CSI snapshot creation time
        4. Restore From the snapshot and measure the time
        5. Attach a new pod to it
        6. Verify that the file is present on the new pod also
        7. Verify that the md5sum of the file on the new pod matches
           with the md5sum of the file on the original pod

        This scenario run 3 times and report all the average results of the 3 runs
        and will send them to the ES
        Args:
            pvc_size: the size of the PVC to be tested - parametrize

        """

        # Getting the total Storage capacity
        ceph_capacity = self.ceph_cluster.get_ceph_capacity()

        log.info(f"Total capacity size is : {ceph_capacity}")
        log.info(f"PVC Size is : {pvc_size}")
        log.info(f"Needed capacity is {int(int(pvc_size) * 5)}")
        # Bail out (without failing) when the cluster cannot hold ~5x the PVC size.
        if int(ceph_capacity) < int(pvc_size) * 5:
            log.error(
                f"PVC size is {pvc_size}GiB and it is too large for this system"
                f" which have only {ceph_capacity}GiB")
            return
        # Calculating the file size as 25% of the PVC size
        # in the end the PVC will be 75% full
        filesize = self.pvc_obj.size * 0.25
        # Change the file size to MB and from int to str
        file_size = f"{int(filesize * 1024)}M"

        # Per-iteration result dicts are collected here and summarized at the end.
        all_results = []

        # Produce ES report
        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        self.full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "pvc_snapshot_perf",
            ))
        # NOTE: pvc_size is a string here (parametrized), hence string concatenation.
        self.full_results.add_key("pvc_size", pvc_size + " GiB")
        self.full_results.add_key("interface", self.sc)
        self.full_results.all_results["creation_time"] = []
        self.full_results.all_results["csi_creation_time"] = []
        self.full_results.all_results["creation_speed"] = []
        self.full_results.all_results["restore_time"] = []
        self.full_results.all_results["restore_speed"] = []
        self.full_results.all_results["restore_csi_time"] = []
        self.full_results.all_results["dataset_inMiB"] = []
        for test_num in range(self.tests_numbers):
            test_results = {
                "test_num": test_num + 1,
                # Each iteration writes another `filesize` chunk, so the dataset
                # grows cumulatively with the iteration number.
                "dataset": (test_num + 1) * filesize * 1024,  # size in MiB
                "create": {
                    "time": None,
                    "csi_time": None,
                    "speed": None
                },
                "restore": {
                    "time": None,
                    "speed": None
                },
            }
            log.info(f"Starting test phase number {test_num}")
            # Step 1. Run I/O on a pod file.
            file_name = f"{self.pod_object.name}-{test_num}"
            log.info(f"Starting IO on the POD {self.pod_object.name}")
            # Going to run only write IO to fill the PVC for the snapshot
            self.pod_object.fillup_fs(size=file_size, fio_filename=file_name)

            # Wait for fio to finish
            fio_result = self.pod_object.get_fio_results()
            err_count = fio_result.get("jobs")[0].get("error")
            assert (
                err_count == 0
            ), f"IO error on pod {self.pod_object.name}. FIO result: {fio_result}"
            log.info("IO on the PVC Finished")

            # Verify presence of the file
            file_path = pod.get_file_path(self.pod_object, file_name)
            log.info(f"Actual file path on the pod {file_path}")
            assert pod.check_file_existence(
                self.pod_object, file_path), f"File {file_name} doesn't exist"
            log.info(f"File {file_name} exists in {self.pod_object.name}")

            # Step 2. Calculate md5sum of the file.
            orig_md5_sum = pod.cal_md5sum(self.pod_object, file_name)

            # Step 3. Take a snapshot of the PVC and measure the time of creation.
            snap_name = self.pvc_obj.name.replace("pvc-test",
                                                  f"snapshot-test{test_num}")
            log.info(f"Taking snapshot of the PVC {snap_name}")

            # Shared starting timestamp for both the total and the CSI measurement.
            start_time = self.get_time("CSI")

            test_results["create"]["time"] = self.measure_create_snapshot_time(
                pvc_name=self.pvc_obj.name,
                snap_name=snap_name,
                namespace=self.pod_object.namespace,
                interface=self.interface,
                start_time=start_time,
            )

            # NOTE(review): assumes measure_create_snapshot_time populated
            # self.snap_uid as a side effect — confirm against its implementation.
            test_results["create"][
                "csi_time"] = performance_lib.measure_csi_snapshot_creation_time(
                    interface=self.interface,
                    snapshot_id=self.snap_uid,
                    start_time=start_time,
                )

            test_results["create"]["speed"] = int(
                test_results["dataset"] / test_results["create"]["time"])
            log.info(
                f' Test {test_num} dataset is {test_results["dataset"]} MiB')
            log.info(
                f"Snapshot name {snap_name} and id {self.snap_uid} creation time is"
                f' : {test_results["create"]["time"]} sec.')
            log.info(
                f"Snapshot name {snap_name} and id {self.snap_uid} csi creation time is"
                f' : {test_results["create"]["csi_time"]} sec.')
            log.info(
                f'Snapshot speed is : {test_results["create"]["speed"]} MB/sec'
            )

            # Step 4. Restore the PVC from the snapshot and measure the time
            # Same Storage class of the original PVC
            sc_name = self.pvc_obj.backed_sc

            # Size should be same as of the original PVC
            # NOTE(review): this rebinds the `pvc_size` parameter to a "<n>Gi"
            # string; it is recomputed from self.pvc_obj.size each iteration.
            pvc_size = str(self.pvc_obj.size) + "Gi"

            # Create pvc out of the snapshot
            # Both, the snapshot and the restore PVC should be in same namespace

            log.info("Restoring from the Snapshot")
            restore_pvc_name = self.pvc_obj.name.replace(
                "pvc-test", f"restore-pvc{test_num}")
            restore_pvc_yaml = constants.CSI_RBD_PVC_RESTORE_YAML
            if self.interface == constants.CEPHFILESYSTEM:
                restore_pvc_yaml = constants.CSI_CEPHFS_PVC_RESTORE_YAML

            csi_start_time = self.get_time("csi")
            log.info("Restoring the PVC from Snapshot")
            restore_pvc_obj = pvc.create_restore_pvc(
                sc_name=sc_name,
                snap_name=self.snap_obj.name,
                namespace=self.snap_obj.namespace,
                size=pvc_size,
                pvc_name=restore_pvc_name,
                restore_pvc_yaml=restore_pvc_yaml,
            )
            helpers.wait_for_resource_state(
                restore_pvc_obj,
                constants.STATUS_BOUND,
                timeout=3600  # setting this to 60 Min.
                # since it can be take long time to restore, and we want it to finished.
            )
            restore_pvc_obj.reload()
            log.info("PVC was restored from the snapshot")
            test_results["restore"][
                "time"] = performance_lib.measure_pvc_creation_time(
                    self.interface, restore_pvc_obj.name, csi_start_time)

            test_results["restore"]["speed"] = int(
                test_results["dataset"] / test_results["restore"]["time"])
            log.info(
                f'Snapshot restore time is : {test_results["restore"]["time"]}'
            )
            log.info(
                f'restore speed is : {test_results["restore"]["speed"]} MB/sec'
            )

            test_results["restore"][
                "csi_time"] = performance_lib.csi_pvc_time_measure(
                    self.interface, restore_pvc_obj, "create", csi_start_time)
            log.info(
                f'Snapshot csi restore time is : {test_results["restore"]["csi_time"]}'
            )

            # Step 5. Attach a new pod to the restored PVC
            restore_pod_object = helpers.create_pod(
                interface_type=self.interface,
                pvc_name=restore_pvc_obj.name,
                namespace=self.snap_obj.namespace,
                pod_dict_path=constants.PERF_POD_YAML,
            )

            # Confirm that the pod is running
            helpers.wait_for_resource_state(resource=restore_pod_object,
                                            state=constants.STATUS_RUNNING)
            restore_pod_object.reload()

            # Step 6. Verify that the file is present on the new pod also.
            log.info(f"Checking the existence of {file_name} "
                     f"on restore pod {restore_pod_object.name}")
            assert pod.check_file_existence(
                restore_pod_object,
                file_path), f"File {file_name} doesn't exist"
            log.info(f"File {file_name} exists in {restore_pod_object.name}")

            # Step 7. Verify that the md5sum matches
            log.info(
                f"Verifying that md5sum of {file_name} "
                f"on pod {self.pod_object.name} matches with md5sum "
                f"of the same file on restore pod {restore_pod_object.name}")
            assert pod.verify_data_integrity(
                restore_pod_object, file_name,
                orig_md5_sum), "Data integrity check failed"
            log.info("Data integrity check passed, md5sum are same")

            # Per-iteration cleanup; the original pod/PVC survive for the next round.
            restore_pod_object.delete()
            restore_pvc_obj.delete()

            all_results.append(test_results)

        # clean the environment
        self.pod_object.delete()
        self.pvc_obj.delete()
        self.delete_test_project()

        # logging the test summary, all info in one place for easy log reading
        # Zero-initialize the six running totals used for averaging below.
        c_speed, c_runtime, c_csi_runtime, r_speed, r_runtime, r_csi_runtime = (
            0 for i in range(6))

        log.info("Test summary :")
        for tst in all_results:
            c_speed += tst["create"]["speed"]
            c_runtime += tst["create"]["time"]
            c_csi_runtime += tst["create"]["csi_time"]
            r_speed += tst["restore"]["speed"]
            r_runtime += tst["restore"]["time"]
            r_csi_runtime += tst["restore"]["csi_time"]

            self.full_results.all_results["creation_time"].append(
                tst["create"]["time"])
            self.full_results.all_results["csi_creation_time"].append(
                tst["create"]["csi_time"])
            self.full_results.all_results["creation_speed"].append(
                tst["create"]["speed"])
            self.full_results.all_results["restore_time"].append(
                tst["restore"]["time"])
            self.full_results.all_results["restore_speed"].append(
                tst["restore"]["speed"])
            self.full_results.all_results["restore_csi_time"].append(
                tst["restore"]["csi_time"])
            self.full_results.all_results["dataset_inMiB"].append(
                tst["dataset"])
            log.info(
                f"Test {tst['test_num']} results : dataset is {tst['dataset']} MiB. "
                f"Take snapshot time is {tst['create']['time']} "
                f"at {tst['create']['speed']} MiB/Sec "
                f"Restore from snapshot time is {tst['restore']['time']} "
                f"at {tst['restore']['speed']} MiB/Sec ")

        avg_snap_c_time = c_runtime / self.tests_numbers
        avg_snap_csi_c_time = c_csi_runtime / self.tests_numbers
        avg_snap_c_speed = c_speed / self.tests_numbers
        avg_snap_r_time = r_runtime / self.tests_numbers
        avg_snap_r_speed = r_speed / self.tests_numbers
        avg_snap_r_csi_time = r_csi_runtime / self.tests_numbers
        log.info(f" Average snapshot creation time is {avg_snap_c_time} sec.")
        log.info(
            f" Average csi snapshot creation time is {avg_snap_csi_c_time} sec."
        )
        log.info(
            f" Average snapshot creation speed is {avg_snap_c_speed} MiB/sec")
        log.info(f" Average snapshot restore time is {avg_snap_r_time} sec.")
        log.info(
            f" Average snapshot restore speed is {avg_snap_r_speed} MiB/sec")
        log.info(
            f" Average snapshot restore csi time is {avg_snap_r_csi_time} sec."
        )

        self.full_results.add_key("avg_snap_creation_time_insecs",
                                  avg_snap_c_time)
        self.full_results.add_key("avg_snap_csi_creation_time_insecs",
                                  avg_snap_csi_c_time)
        self.full_results.add_key("avg_snap_creation_speed", avg_snap_c_speed)
        self.full_results.add_key("avg_snap_restore_time_insecs",
                                  avg_snap_r_time)
        self.full_results.add_key("avg_snap_restore_speed", avg_snap_r_speed)
        self.full_results.add_key("avg_snap_restore_csi_time_insecs",
                                  avg_snap_r_csi_time)

        # Write the test results into the ES server
        self.results_path = helpers.get_full_test_logs_path(cname=self)
        log.info(f"Logs file path name is : {self.full_log_path}")
        log.info("writing results to elastic search server")
        if self.full_results.es_write():
            res_link = self.full_results.results_link()

            # write the ES link to the test results in the test log.
            log.info(f"The result can be found at : {res_link}")

            self.write_result_to_file(res_link)