Example #1
    def test_pvc_deletion_measurement_performance(self, teardown_factory,
                                                  pvc_size):
        """
        Measuring PVC deletion time is within supported limits
        """
        logging.info('Start creating new PVC')

        pvc_obj = helpers.create_pvc(sc_name=self.sc_obj.name, size=pvc_size)
        helpers.wait_for_resource_state(pvc_obj, constants.STATUS_BOUND)
        pvc_obj.reload()
        pv_name = pvc_obj.backed_pv
        pvc_reclaim_policy = pvc_obj.reclaim_policy
        teardown_factory(pvc_obj)
        logging.info('Start deletion of PVC')
        pvc_obj.delete()
        pvc_obj.ocp.wait_for_delete(pvc_obj.name)
        if pvc_reclaim_policy == constants.RECLAIM_POLICY_DELETE:
            helpers.validate_pv_delete(pvc_obj.backed_pv)
        delete_time = helpers.measure_pvc_deletion_time(
            self.interface, pv_name)
        # Deletion time for a CephFS PVC is a little over 3 seconds
        accepted_deletion_time = 4 if self.interface == constants.CEPHFILESYSTEM else 3
        logging.info(f"PVC deleted in {delete_time} seconds")
        if delete_time > accepted_deletion_time:
            raise ex.PerformanceException(
                f"PVC deletion time is {delete_time} and greater than {accepted_deletion_time} seconds"
            )
        push_to_pvc_time_dashboard(self.interface, "deletion", delete_time)
Example #2
    def test_bulk_pvc_creation_after_deletion_performance(
            self, teardown_factory):
        """
        Measuring PVC creation time of bulk of 75% of initial PVC bulk (120) in the same
        rate after deleting ( serial deletion) 75% of the initial PVCs.

        Args:
            teardown_factory: A fixture used when we want a new resource that was created during the tests
                               to be removed in the teardown phase.
        Returns:

        """
        initial_number_of_pvcs = 120
        number_of_pvcs = math.ceil(initial_number_of_pvcs * 0.75)

        log.info(f"Start creating new {initial_number_of_pvcs} PVCs in a bulk")
        pvc_objs, _ = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=initial_number_of_pvcs,
            size=self.pvc_size,
            burst=True,
        )
        for pvc_obj in pvc_objs:
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor() as executor:
            for pvc_obj in pvc_objs:
                executor.submit(helpers.wait_for_resource_state, pvc_obj,
                                constants.STATUS_BOUND)

                executor.submit(pvc_obj.reload)
        log.info("Deleting 75% of the PVCs - 90 PVCs")
        assert pvc.delete_pvcs(pvc_objs[:number_of_pvcs],
                               True), "Deletion of 75% of PVCs failed"
        log.info("Re-creating the 90 PVCs")
        pvc_objs, _ = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=number_of_pvcs,
            size=self.pvc_size,
            burst=True,
        )
        start_time = helpers.get_provision_time(self.interface,
                                                pvc_objs,
                                                status="start")
        end_time = helpers.get_provision_time(self.interface,
                                              pvc_objs,
                                              status="end")
        total = end_time - start_time
        total_time = total.total_seconds()
        logging.info(
            f"Creation time of {number_of_pvcs} PVCs is {total_time} seconds.")

        if total_time > 50:
            raise ex.PerformanceException(
                f"{number_of_pvcs} PVCs creation (after initial deletion of "
                f"75% of PVCs) time is {total_time} and greater than 50 seconds."
            )
        logging.info(
            f"{number_of_pvcs} PVCs creation time took less than 50 seconds")
Example #3
    def test_pod_start_time(self, pod):
        """
        Test to log pod start time
        """
        start_time_dict = pod_start_time(pod)
        start_time = start_time_dict["web-server"]
        logging.info(f"pod start time: {start_time} seconds")
        if start_time > 30:
            raise ex.PerformanceException(f"pod start time is {start_time}, "
                                          f"which is greater than 30 seconds")
Example #4
    def test_pvc_creation_measurement_performance(self, teardown_factory,
                                                  pvc_size):
        """
        The test measures PVC creation times for sample_num volumes
        (limit for the creation time for pvc is defined in accepted_create_time)
        and compares the creation time of each to the accepted_create_time ( if greater - fails the test)
        If all the measures are up to the accepted value
        The test calculates .... difference between creation time of each one of the PVCs and the average
        is not more than Accepted diff ( currently 5%)

        Args:
            teardown factory: A fixture used when we want a new resource that was created during the tests
                               to be removed in the teardown phase.
            pvc_size: Size of the created PVC
        """
        num_of_samples = 3
        accepted_deviation_percent = 5
        accepted_create_time = 3

        create_measures = self.create_mutiple_pvcs_statistics(
            num_of_samples, teardown_factory, pvc_size)
        log.info(f"Current measures are {create_measures}")

        for i in range(num_of_samples):
            if create_measures[i] > accepted_create_time:
                raise ex.PerformanceException(
                    f"PVC creation time is {create_measures[i]} and is greater than {accepted_create_time} seconds."
                )

        average = statistics.mean(create_measures)
        st_deviation = statistics.stdev(create_measures)
        log.info(
            f"The average creation time for the sampled {num_of_samples} PVCs is {average}."
        )

        st_deviation_percent = st_deviation / average * 100.0
        if st_deviation_percent > accepted_deviation_percent:
            raise ex.PerformanceException(
                f"PVC creation time deviation is {st_deviation_percent}% "
                f"and is greater than the allowed {accepted_deviation_percent}%."
            )
        push_to_pvc_time_dashboard(self.interface, "1-pvc-creation",
                                   st_deviation)
Example #5
    def test_multiple_pvc_creation_after_deletion_performance(
        self, teardown_factory
    ):
        """
        Measuring PVC creation time of 75% of initial PVCs (120) in the same
        rate after deleting 75% of the initial PVCs
        """
        initial_number_of_pvcs = 120
        number_of_pvcs = math.ceil(initial_number_of_pvcs * 0.75)

        log.info(f'Start creating new {initial_number_of_pvcs} PVCs')
        pvc_objs = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=initial_number_of_pvcs,
            size=self.pvc_size,
        )
        for pvc_obj in pvc_objs:
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor() as executor:
            for pvc_obj in pvc_objs:
                executor.submit(
                    helpers.wait_for_resource_state, pvc_obj,
                    constants.STATUS_BOUND
                )

                executor.submit(pvc_obj.reload)
        log.info(f'Deleting 75% of the PVCs - {number_of_pvcs} PVCs')
        assert pvc.delete_pvcs(pvc_objs[:number_of_pvcs], True), (
            "Deletion of 75% of PVCs failed"
        )
        log.info(f'Re-creating {number_of_pvcs} PVCs')
        pvc_objs = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=number_of_pvcs,
            size=self.pvc_size,
        )
        start_time = helpers.get_start_creation_time(
            self.interface, pvc_objs[0].name
        )
        end_time = helpers.get_end_creation_time(
            self.interface, pvc_objs[number_of_pvcs - 1].name,
        )
        total = end_time - start_time
        total_time = total.total_seconds()
        if total_time > 45:
            raise ex.PerformanceException(
                f"{number_of_pvcs} PVCs creation (after initial deletion of "
                f"75%) time is {total_time} and greater than 45 seconds"
            )
        logging.info(
            f"{number_of_pvcs} PVCs creation time took less than 45 seconds"
        )
Example #6
    def test_pod_start_time(self, pod):
        """
        Test to log pod start time
        """
        # Getting the test start time
        self.start_time = self.get_time()

        # The actual test
        start_time_dict = pod_start_time(pod)

        # Getting the full path for the test logs
        self.full_log_path = get_full_test_logs_path(cname=self)
        if self.interface == constants.CEPHBLOCKPOOL:
            self.sc = "RBD"
        elif self.interface == constants.CEPHFILESYSTEM:
            self.sc = "CephFS"
        elif self.interface == constants.CEPHBLOCKPOOL_THICK:
            self.sc = "RBD-Thick"
        self.full_log_path += f"-{self.sc}"
        log.info(f"Logs file path name is : {self.full_log_path}")

        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        self.full_results = self.init_full_results(
            ResultsAnalyse(self.uuid, self.crd_data, self.full_log_path))

        start_time = start_time_dict["web-server"]
        logging.info(f"pod start time: {start_time} seconds")
        if start_time > 30:
            raise ex.PerformanceException(f"pod start time is {start_time}, "
                                          f"which is greater than 30 seconds")
        self.full_results.add_key("storageclass", self.sc)
        self.full_results.add_key("attach_time", start_time)

        # Getting the test end time
        self.end_time = self.get_time()

        # Add the test time to the ES report
        self.full_results.add_key("test_time", {
            "start": self.start_time,
            "end": self.end_time
        })

        # Write the test results into the ES server
        self.full_results.es_write()

        # write the ES link to the test results in the test log.
        log.info(
            f"The Result can be found at : {self.full_results.results_link()}")
Example #7
    def test_multiple_pvc_creation_measurement_performance(
        self, teardown_factory
    ):
        """
        Measuring PVC creation time of 120 PVCs in 180 seconds

        Args:
            teardown_factory: A fixture used when we want a new resource that was created during the tests
                               to be removed in the teardown phase.
        """
        number_of_pvcs = 120
        log.info(f'Start creating new {number_of_pvcs} PVCs')

        pvc_objs = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=number_of_pvcs,
            size=self.pvc_size,
            burst=True
        )
        for pvc_obj in pvc_objs:
            pvc_obj.reload()
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor(max_workers=5) as executor:
            for pvc_obj in pvc_objs:
                executor.submit(
                    helpers.wait_for_resource_state, pvc_obj,
                    constants.STATUS_BOUND
                )

                executor.submit(pvc_obj.reload)
        start_time = helpers.get_provision_time(
            self.interface, pvc_objs, status='start'
        )
        end_time = helpers.get_provision_time(
            self.interface, pvc_objs, status='end'
        )
        total = end_time - start_time
        total_time = total.total_seconds()
        if total_time > 180:
            raise ex.PerformanceException(
                f"{number_of_pvcs} PVCs creation time is {total_time} and "
                f"greater than 180 seconds"
            )
        logging.info(
            f"{number_of_pvcs} PVCs creation time took {total_time} seconds"
        )
Example #8
    def test_pvc_creation_measurement_performance(self, teardown_factory):
        """
        Measuring PVC creation time
        """
        log.info('Start creating new PVC')

        pvc_obj = helpers.create_pvc(sc_name=self.sc_obj.name,
                                     size=self.pvc_size)
        helpers.wait_for_resource_state(pvc_obj, constants.STATUS_BOUND)
        pvc_obj.reload()
        teardown_factory(pvc_obj)
        create_time = helpers.measure_pvc_creation_time(
            self.interface, pvc_obj.name)
        if create_time > 1:
            raise ex.PerformanceException(
                f"PVC creation time is {create_time} and greater than 1 second"
            )
        logging.info("PVC creation took less than a 1 second")
Example #9
    def test_multiple_pvc_creation_measurement_performance(
        self, teardown_factory
    ):
        """
        Measuring PVC creation time of 120 PVCs in 180 seconds
        """
        number_of_pvcs = 120
        log.info(f'Start creating new {number_of_pvcs} PVCs')

        pvc_objs = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=number_of_pvcs,
            size=self.pvc_size,
        )
        for pvc_obj in pvc_objs:
            pvc_obj.reload()
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor(max_workers=5) as executor:
            for pvc_obj in pvc_objs:
                executor.submit(
                    helpers.wait_for_resource_state, pvc_obj,
                    constants.STATUS_BOUND
                )

                executor.submit(pvc_obj.reload)
        start_time = helpers.get_start_creation_time(
            self.interface, pvc_objs[0].name
        )
        end_time = helpers.get_end_creation_time(
            self.interface, pvc_objs[number_of_pvcs - 1].name,
        )
        total = end_time - start_time
        total_time = total.total_seconds()
        if total_time > 180:
            raise ex.PerformanceException(
                f"{number_of_pvcs} PVCs creation time is {total_time} and "
                f"greater than 180 seconds"
            )
        logging.info(
            f"{number_of_pvcs} PVCs creation time took {total_time} seconds"
        )
Example #10
    def process_time_measurements(
        self, action_name, time_measures, accepted_deviation_percent, msg_prefix
    ):
        """
           Analyses the given time measured. If the standard deviation of these times is bigger than the
           provided accepted deviation percent, fails the test

        Args:
            action_name (str): Name of the action for which these measurements were collected; used for the logging
            time_measures (list of floats): A list of time measurements
            accepted_deviation_percent (int): Accepted deviation percent,
                if the standard  deviation of the provided time measurements is bigger than this value, the test fails
            msg_prefix (str) : A string for comprehensive logging

        Returns:
            (float) The average value of the provided time measurements
        """
        average = statistics.mean(time_measures)
        log.info(
            f"{msg_prefix} The average {action_name} time for the sampled {len(time_measures)} "
            f"PVCs is {average} seconds."
        )

        st_deviation = statistics.stdev(time_measures)
        st_deviation_percent = st_deviation / average * 100.0
        if st_deviation_percent > accepted_deviation_percent:
            raise ex.PerformanceException(
                f"{msg_prefix} PVC {action_name} time deviation is {st_deviation_percent}% "
                f"and is greater than the allowed {accepted_deviation_percent}%."
            )

        log.info(
            f"{msg_prefix} The standard deviation percent for {action_name} of {len(time_measures)} sampled "
            f"PVCs is {st_deviation_percent}%."
        )

        return average
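The deviation check in process_time_measurements is plain standard-library statistics. A self-contained sketch of the same computation with made-up sample values (not real measurements):

import statistics

time_measures = [1.2, 1.4, 1.3]  # hypothetical per-PVC creation times, in seconds
accepted_deviation_percent = 50

average = statistics.mean(time_measures)               # 1.3
st_deviation = statistics.stdev(time_measures)         # sample standard deviation, 0.1
st_deviation_percent = st_deviation / average * 100.0  # ~7.7%
print(f"average={average:.3f}s, deviation={st_deviation_percent:.1f}%")
assert st_deviation_percent <= accepted_deviation_percent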
Example #11
    def test_pvc_reattach_time_performance(self, pvc_factory,
                                           teardown_factory):
        """
        Test assign nodeName to a pod using RWX pvc
        Performance in test_multiple_pvc_creation_measurement_performance
        Each kernel (unzipped) is 892M and 61694 files
        """

        kernel_url = 'https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-4.19.5.tar.gz'
        download_path = 'tmp'
        # Number of times we copy the kernel
        copies = 3

        # Download a Linux kernel
        import os
        dir_path = os.path.join(os.getcwd(), download_path)
        file_path = os.path.join(dir_path, 'file.gz')
        os.makedirs(dir_path, exist_ok=True)
        urllib.request.urlretrieve(kernel_url, file_path)

        worker_nodes_list = helpers.get_worker_nodes()
        assert len(worker_nodes_list) > 1, 'This test requires at least two worker nodes'
        node_one = worker_nodes_list[0]
        node_two = worker_nodes_list[1]

        # Create a PVC
        accessmode = constants.ACCESS_MODE_RWX
        if self.interface == constants.CEPHBLOCKPOOL:
            accessmode = constants.ACCESS_MODE_RWO
        pvc_obj = pvc_factory(
            interface=self.interface,
            access_mode=accessmode,
            status=constants.STATUS_BOUND,
            size='15',
        )

        # Create a pod on one node
        logging.info(
            f"Creating Pod with pvc {pvc_obj.name} on node {node_one}")

        helpers.pull_images('nginx')
        pod_obj1 = helpers.create_pod(interface_type=self.interface,
                                      pvc_name=pvc_obj.name,
                                      namespace=pvc_obj.namespace,
                                      node_name=node_one,
                                      pod_dict_path=constants.NGINX_POD_YAML)

        # Confirm that the pod is running on the selected node
        logging.info('Checking whether the pod is running on the selected node')
        helpers.wait_for_resource_state(resource=pod_obj1,
                                        state=constants.STATUS_RUNNING,
                                        timeout=120)

        pod_name = pod_obj1.name
        pod_path = '/var/lib/www/html'

        _ocp = OCP(namespace=pvc_obj.namespace)

        rsh_cmd = f"exec {pod_name} -- apt-get update"
        _ocp.exec_oc_cmd(rsh_cmd)
        rsh_cmd = f"exec {pod_name} -- apt-get install -y rsync"
        _ocp.exec_oc_cmd(rsh_cmd, ignore_error=True, out_yaml_format=False)

        rsh_cmd = f"rsync {dir_path} {pod_name}:{pod_path}"
        _ocp.exec_oc_cmd(rsh_cmd)

        rsh_cmd = f"exec {pod_name} -- tar xvf {pod_path}/tmp/file.gz -C /var/lib/www/html/tmp"
        _ocp.exec_oc_cmd(rsh_cmd)

        for x in range(copies):
            rsh_cmd = f"exec {pod_name} -- mkdir -p {pod_path}/folder{x}"
            _ocp.exec_oc_cmd(rsh_cmd)
            rsh_cmd = f"exec {pod_name} -- cp -r {pod_path}/tmp {pod_path}/folder{x}"
            _ocp.exec_oc_cmd(rsh_cmd)

        rsh_cmd = f"delete pod {pod_name}"
        _ocp.exec_oc_cmd(rsh_cmd)

        logging.info(
            f"Creating Pod with pvc {pvc_obj.name} on node {node_two}")

        pod_obj2 = helpers.create_pod(interface_type=self.interface,
                                      pvc_name=pvc_obj.name,
                                      namespace=pvc_obj.namespace,
                                      node_name=node_two,
                                      pod_dict_path=constants.NGINX_POD_YAML)

        start_time = time.time()

        pod_name = pod_obj2.name
        helpers.wait_for_resource_state(resource=pod_obj2,
                                        state=constants.STATUS_RUNNING,
                                        timeout=120)
        end_time = time.time()
        total_time = end_time - start_time
        if total_time > 60:
            raise ex.PerformanceException(
                f"Pod creation time is {total_time} and "
                f"greater than 60 seconds")
        logging.info(f"Pod {pod_name} creation time took {total_time} seconds")

        teardown_factory(pod_obj2)
        os.remove(file_path)
        os.rmdir(dir_path)
Example #12
    def test_multiple_pvc_deletion_measurement_performance(
            self, teardown_factory):
        """
        Measuring PVC deletion time of 120 PVCs in 180 seconds

        Args:
            teardown_factory: A fixture used when we want a new resource that was created during the tests
                               to be removed in the teardown phase.
        """
        number_of_pvcs = 120
        pvc_size = "1Gi"
        msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."

        log.info(f"{msg_prefix} Start creating new {number_of_pvcs} PVCs")

        pvc_objs, _ = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=self.namespace,
            number_of_pvc=number_of_pvcs,
            size=pvc_size,
            burst=True,
        )

        for pvc_obj in pvc_objs:
            pvc_obj.reload()
            teardown_factory(pvc_obj)

        timeout = 600 if self.interface == constants.CEPHBLOCKPOOL_THICK else 60
        with ThreadPoolExecutor(max_workers=5) as executor:
            for pvc_obj in pvc_objs:
                executor.submit(
                    helpers.wait_for_resource_state,
                    pvc_obj,
                    constants.STATUS_BOUND,
                    timeout=timeout,
                )
                executor.submit(pvc_obj.reload)

        pod_objs = []
        for pvc_obj in pvc_objs:
            pod_obj = self.write_file_on_pvc(pvc_obj, 0.3)
            pod_objs.append(pod_obj)

        # Reload all PVC objects in parallel; the PVC and PV names are needed
        # later to fetch deletion time data from the provisioner logs
        threads = list()
        for pvc_obj in pvc_objs:
            process = threading.Thread(target=pvc_obj.reload)
            process.start()
            threads.append(process)
        for process in threads:
            process.join()

        pvc_name_list = [pvc_obj.name for pvc_obj in pvc_objs]
        pv_name_list = [pvc_obj.backed_pv for pvc_obj in pvc_objs]
        log.info(f"{msg_prefix} Preparing to delete 120 PVC")

        # Delete PVC
        for pvc_obj, pod_obj in zip(pvc_objs, pod_objs):
            pod_obj.delete(wait=True)
            pvc_obj.delete()
            pvc_obj.ocp.wait_for_delete(pvc_obj.name)

        # Get PVC deletion time
        pvc_deletion_time = helpers.measure_pv_deletion_time_bulk(
            interface=self.interface, pv_name_list=pv_name_list)
        log.info(
            f"{msg_prefix} {number_of_pvcs} bulk deletion time is {pvc_deletion_time}"
        )

        # accepted deletion time is 2 secs for each PVC
        accepted_pvc_deletion_time = number_of_pvcs * 2

        for del_time in pvc_deletion_time.values():
            if del_time > accepted_pvc_deletion_time:
                raise ex.PerformanceException(
                    f"{msg_prefix} PVC deletion time is {del_time} and is "
                    f"greater than {accepted_pvc_deletion_time} seconds")

        logging.info(f"{msg_prefix} {number_of_pvcs} PVCs deletion times are:")
        for name, a_time in pvc_deletion_time.items():
            logging.info(f"{name} deletion time is: {a_time} seconds")

        if self.interface == constants.CEPHBLOCKPOOL:
            self.sc = "RBD"
        elif self.interface == constants.CEPHFILESYSTEM:
            self.sc = "CephFS"
        elif self.interface == constants.CEPHBLOCKPOOL_THICK:
            self.sc = "RBD-Thick"

        full_log_path = get_full_test_logs_path(
            cname=self) + f"-{self.sc}-{pvc_size}"
        self.results_path = get_full_test_logs_path(cname=self)
        log.info(f"Logs file path name is : {full_log_path}")

        self.get_env_info()

        # Initialize the results doc file.
        full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                full_log_path,
                "pvc_bulk_deletion_fullres",
            ))

        full_results.add_key("interface", self.interface)
        full_results.add_key("bulk_size", number_of_pvcs)
        full_results.add_key("pvc_size", pvc_size)
        full_results.all_results["bulk_deletion_time"] = pvc_deletion_time

        if full_results.es_write():
            res_link = full_results.results_link()
            log.info(f"The Result can be found at : {res_link}")

            # Create text file with results of all subtest (3 - according to the parameters)
            self.write_result_to_file(res_link)
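A side note on the threading used in this example: threading.Thread expects the callable itself in target, with its arguments passed separately via args. Writing target=some_list.append(item) would invoke append immediately and hand the thread None to run. A minimal standard-library sketch of the correct pattern:

import threading

names = []
items = ["pvc-1", "pvc-2", "pvc-3"]

# Pass the callable and its arguments separately; list.append itself is the target
threads = [threading.Thread(target=names.append, args=(item,)) for item in items]
for thread in threads:
    thread.start()
for thread in threads:
    thread.join()
assert sorted(names) == items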
Example #13
    def test_pod_start_time(self, pod_obj_list):
        """
        Test to log pod start times for all the sampled pods
        """
        # Getting the test start time
        self.test_start_time = PASTest.get_time()

        # Start of the actual test
        start_time_dict_list = []
        for pod in pod_obj_list:
            start_time_dict_list.append(pod_start_time(pod))

        # Getting the full path for the test logs
        self.full_log_path = get_full_test_logs_path(cname=self)
        if self.interface == constants.CEPHBLOCKPOOL:
            self.sc = "RBD"
        elif self.interface == constants.CEPHFILESYSTEM:
            self.sc = "CephFS"
        elif self.interface == constants.CEPHBLOCKPOOL_THICK:
            self.sc = "RBD-Thick"
        self.full_log_path += f"-{self.sc}"
        log.info(f"Logs file path name is : {self.full_log_path}")

        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        self.full_results = self.init_full_results(
            ResultsAnalyse(self.uuid, self.crd_data, self.full_log_path)
        )
        self.full_results.add_key("storageclass", self.sc)

        time_measures = [t["web-server"] for t in start_time_dict_list]
        for index, start_time in enumerate(time_measures):
            logging.info(
                f"{self.msg_prefix} pod number {index} start time: {start_time} seconds"
            )
            if start_time > 30:
                raise ex.PerformanceException(
                    f"{self.msg_prefix} Pod number {index} start time is {start_time}, "
                    f"which is greater than 30 seconds"
                )
        self.full_results.add_key("attach_time", time_measures)

        average = statistics.mean(time_measures)
        logging.info(
            f"{self.msg_prefix} The average time for the sampled {len(time_measures)} pods is {average} seconds."
        )
        self.full_results.add_key("attach_time_average", average)

        st_deviation = statistics.stdev(time_measures)
        st_deviation_percent = st_deviation / average * 100.0
        logging.info(
            f"{self.msg_prefix} The standard deviation percent for the sampled {len(time_measures)} pods"
            f" is {st_deviation_percent}%"
        )
        self.full_results.add_key("attach_time_stdev_percent", st_deviation_percent)

        # Getting the test end time
        self.test_end_time = PASTest.get_time()

        # Add the test time to the ES report
        self.full_results.add_key(
            "test_time", {"start": self.test_start_time, "end": self.test_end_time}
        )

        self.full_results.add_key("samples_number", self.samples_num)
        self.full_results.add_key("pvc_size", self.pvc_size)

        # Write the test results into the ES server
        self.full_results.es_write()

        # write the ES link to the test results in the test log.
        log.info(
            f"{self.msg_prefix} The Result can be found at : {self.full_results.results_link()}"
        )
Example #14
    def test_pvc_creation_deletion_measurement_performance(
        self, teardown_factory, pvc_size
    ):
        """
        Measuring PVC creation and deletion times for pvc samples
        Verifying that those times are within required limits
        """

        num_of_samples = 5
        accepted_creation_time = 1
        accepted_deletion_time = 2 if self.interface == constants.CEPHFILESYSTEM else 1

        accepted_creation_deviation_percent = 50
        accepted_deletion_deviation_percent = 50

        creation_time_measures = []
        deletion_time_measures = []
        msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."

        for i in range(num_of_samples):
            logging.info(f"{msg_prefix} Start creating PVC number {i + 1}.")
            start_time = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
            pvc_obj = helpers.create_pvc(sc_name=self.sc_obj.name, size=pvc_size)
            helpers.wait_for_resource_state(pvc_obj, constants.STATUS_BOUND)
            pvc_obj.reload()

            creation_time = performance_lib.measure_pvc_creation_time(
                self.interface, pvc_obj.name, start_time
            )

            logging.info(
                f"{msg_prefix} PVC number {i + 1} was created in {creation_time} seconds."
            )
            if creation_time > accepted_creation_time:
                raise ex.PerformanceException(
                    f"{msg_prefix} PVC creation time is {creation_time} and is greater than "
                    f"{accepted_creation_time} seconds."
                )
            creation_time_measures.append(creation_time)

            pv_name = pvc_obj.backed_pv
            pvc_reclaim_policy = pvc_obj.reclaim_policy

            pod_obj = self.write_file_on_pvc(pvc_obj)
            pod_obj.delete(wait=True)
            teardown_factory(pvc_obj)
            logging.info(f"{msg_prefix} Start deleting PVC number {i + 1}")
            if pvc_reclaim_policy == constants.RECLAIM_POLICY_DELETE:
                pvc_obj.delete()
                pvc_obj.ocp.wait_for_delete(pvc_obj.name)
                helpers.validate_pv_delete(pvc_obj.backed_pv)
                deletion_time = helpers.measure_pvc_deletion_time(
                    self.interface, pv_name
                )
                logging.info(
                    f"{msg_prefix} PVC number {i + 1} was deleted in {deletion_time} seconds."
                )
                if deletion_time > accepted_deletion_time:
                    raise ex.PerformanceException(
                        f"{msg_prefix} PVC deletion time is {deletion_time} and is greater than "
                        f"{accepted_deletion_time} seconds."
                    )
                deletion_time_measures.append(deletion_time)
            else:
                logging.info(
                    f"Reclaim policy of the PVC {pvc_obj.name} is not Delete;"
                    f" therefore not measuring deletion time for this PVC."
                )

        creation_average = self.process_time_measurements(
            "creation",
            creation_time_measures,
            accepted_creation_deviation_percent,
            msg_prefix,
        )
        deletion_average = self.process_time_measurements(
            "deletion",
            deletion_time_measures,
            accepted_deletion_deviation_percent,
            msg_prefix,
        )

        # all the results are OK, the test passes, push the results to the codespeed
        push_to_pvc_time_dashboard(self.interface, "1-pvc-creation", creation_average)
        push_to_pvc_time_dashboard(self.interface, "1-pvc-deletion", deletion_average)
Example #15
    def test_bulk_pvc_creation_deletion_measurement_performance(
            self, teardown_factory, bulk_size):
        """
        Measuring PVC creation and deletion time of bulk_size PVCs

        Args:
            teardown_factory: A fixture used when we want a new resource that was created during the tests
                               to be removed in the teardown phase.
            bulk_size: Size of the bulk to be tested
        """
        bulk_creation_time_limit = bulk_size / 2
        log.info(f"Start creating new {bulk_size} PVCs")

        pvc_objs, yaml_creation_dir = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=bulk_size,
            size=self.pvc_size,
            burst=True,
        )
        logging.info(f"PVC creation dir is {yaml_creation_dir}")

        for pvc_obj in pvc_objs:
            pvc_obj.reload()
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor(max_workers=5) as executor:
            for pvc_obj in pvc_objs:
                executor.submit(helpers.wait_for_resource_state, pvc_obj,
                                constants.STATUS_BOUND)

                executor.submit(pvc_obj.reload)

        start_time = helpers.get_provision_time(self.interface,
                                                pvc_objs,
                                                status="start")
        end_time = helpers.get_provision_time(self.interface,
                                              pvc_objs,
                                              status="end")
        total_time = (end_time - start_time).total_seconds()
        logging.info(
            f"{bulk_size} Bulk PVCs creation time is {total_time} seconds.")

        if total_time > bulk_creation_time_limit:
            raise ex.PerformanceException(
                f"{bulk_size} Bulk PVCs creation time is {total_time} and "
                f"greater than {bulk_creation_time_limit} seconds")

        pv_names_list = []
        for pvc_obj in pvc_objs:
            pv_names_list.append(pvc_obj.backed_pv)

        logging.info(f"Starting to delete bulk of {bulk_size} PVCs")
        helpers.delete_bulk_pvcs(yaml_creation_dir, pv_names_list)
        logging.info(
            f"Deletion of bulk of {bulk_size} PVCs successfully completed")

        log_deletion_times = helpers.measure_pv_deletion_time_bulk(
            self.interface, pv_names_list, return_log_times=True)

        all_start_times = [
            a_tuple[0] for a_tuple in log_deletion_times.values()
        ]
        bulk_start_time = min(all_start_times)  # the earliest start time
        start_deletion_time = datetime.datetime.strptime(
            bulk_start_time, helpers.DATE_TIME_FORMAT)

        all_end_times = [a_tuple[1] for a_tuple in log_deletion_times.values()]
        bulk_deletion_time = max(all_end_times)  # the latest end time
        end_deletion_time = datetime.datetime.strptime(
            bulk_deletion_time, helpers.DATE_TIME_FORMAT)

        total_deletion_time = (end_deletion_time -
                               start_deletion_time).total_seconds()
        logging.info(
            f"{bulk_size} Bulk PVCs deletion time is {total_deletion_time} seconds."
        )
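The bulk deletion window above runs from the earliest per-PV start time to the latest per-PV end time. A self-contained sketch of the same computation, with made-up timestamps and a stand-in for helpers.DATE_TIME_FORMAT (the real format string lives in the project's helpers module):

import datetime

DATE_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"  # assumed format, for the demo only
log_deletion_times = {                  # per-PV (start, end) pairs, made up
    "pv-a": ("2021-01-01 10:00:00", "2021-01-01 10:00:04"),
    "pv-b": ("2021-01-01 10:00:01", "2021-01-01 10:00:07"),
}

starts = [datetime.datetime.strptime(start, DATE_TIME_FORMAT)
          for start, _ in log_deletion_times.values()]
ends = [datetime.datetime.strptime(end, DATE_TIME_FORMAT)
        for _, end in log_deletion_times.values()]
total_deletion_time = (max(ends) - min(starts)).total_seconds()
print(total_deletion_time)  # 7.0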
Example #16
    def test_pod_start_time(
        self,
        interface,
        storageclass_factory,
        samples_num,
        pvc_size,
    ):
        """
        Test to log pod total and csi start times for all the sampled pods
        """

        self.interface = interface
        self.samples_num = samples_num
        self.pvc_size = pvc_size
        self.sc_obj = storageclass_factory(self.interface)
        self.msg_prefix = f"Interface: {self.interface}, PVC size: {self.pvc_size} GB."

        if self.interface == constants.CEPHBLOCKPOOL:
            self.sc = "RBD"
        elif self.interface == constants.CEPHFILESYSTEM:
            self.sc = "CephFS"

        self.results_path = os.path.join("/", *self.results_path,
                                         "test_pod_start_time")

        # Getting the test start time
        self.test_start_time = self.get_time()

        # The actual test starts here
        self.run()

        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        self.full_results = self.init_full_results(
            ResultsAnalyse(self.uuid, self.crd_data, self.full_log_path))

        # Verify that all samples are in the acceptable time range
        time_measures = [t["performance"] for t in self.start_time_dict_list]
        for index, start_time in enumerate(time_measures):
            log.info(
                f"{self.msg_prefix} pod number {index+1} start time: {start_time} seconds, "
                f"csi start time is {self.csi_time_dict_list[index]}")
            if start_time > self.acceptable_time:
                raise ex.PerformanceException(
                    f"{self.msg_prefix} Pod number {index + 1} start time is {start_time}, "
                    f"which is greater than {self.acceptable_time} seconds")
        self.full_results.all_results["attach_time"] = time_measures
        self.full_results.all_results[
            "attach_csi_time"] = self.csi_time_dict_list

        # Calculating the average attach time, and the STD between all samples.
        average = statistics.mean(time_measures)
        log.info(
            f"{self.msg_prefix} The average time for the sampled {len(time_measures)} pods is {average} seconds."
        )
        self.full_results.add_key("attach_time_average", average)

        csi_average = statistics.mean(self.csi_time_dict_list)
        log.info(
            f"{self.msg_prefix} The average csi time for the sampled {len(self.csi_time_dict_list)} pods"
            f" is {csi_average} seconds.")
        self.full_results.add_key("attach_csi_time_average", csi_average)

        st_deviation = statistics.stdev(time_measures)
        st_deviation_percent = st_deviation / average * 100.0
        log.info(
            f"{self.msg_prefix} The standard deviation percent for the sampled {len(time_measures)} pods"
            f" is {st_deviation_percent}")
        self.full_results.add_key("attach_time_stdev_percent",
                                  st_deviation_percent)

        csi_st_deviation = statistics.stdev(self.csi_time_dict_list)
        csi_st_deviation_percent = csi_st_deviation / csi_average * 100.0
        log.info(
            f"{self.msg_prefix} The standard deviation percent for csi start time of the sampled "
            f"{len(time_measures)} pods is {csi_st_deviation_percent}")
        self.full_results.add_key("attach_time_csi_stdev_percent",
                                  csi_st_deviation_percent)

        # Getting the test end time
        self.test_end_time = self.get_time()

        # Add the test time to the ES report
        self.full_results.add_key("test_time", {
            "start": self.test_start_time,
            "end": self.test_end_time
        })

        # Write the test results into the ES server
        if self.full_results.es_write():
            res_link = self.full_results.results_link()
            log.info(f"The Result can be found at : {res_link}")

            # Create text file with results of all subtest (4 - according to the parameters)
            self.write_result_to_file(res_link)
    def test_pvc_creation_deletion_measurement_performance(
            self, teardown_factory, pvc_size):
        """
        Measuring PVC creation and deletion times for pvc samples
        Verifying that those times are within required limits
        """

        # Getting the full path for the test logs
        self.full_log_path = get_full_test_logs_path(cname=self)
        if self.interface == constants.CEPHBLOCKPOOL:
            self.sc = "RBD"
        elif self.interface == constants.CEPHFILESYSTEM:
            self.sc = "CephFS"
        self.full_log_path += f"-{self.sc}-{pvc_size}"
        log.info(f"Logs file path name is : {self.full_log_path}")

        self.start_time = time.strftime("%Y-%m-%dT%H:%M:%SGMT", time.gmtime())

        self.get_env_info()

        # Initialize the results doc file.
        self.full_results = self.init_full_results(
            ResultsAnalyse(self.uuid, self.crd_data, self.full_log_path))
        self.full_results.add_key("pvc_size", pvc_size)
        num_of_samples = 5
        accepted_creation_time = 1
        accepted_deletion_time = 2 if self.interface == constants.CEPHFILESYSTEM else 1
        self.full_results.add_key("samples", num_of_samples)

        accepted_creation_deviation_percent = 50
        accepted_deletion_deviation_percent = 50

        creation_time_measures = []
        deletion_time_measures = []
        msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."

        for i in range(num_of_samples):
            logging.info(f"{msg_prefix} Start creating PVC number {i + 1}.")
            start_time = datetime.datetime.utcnow().strftime(
                "%Y-%m-%dT%H:%M:%SZ")
            pvc_obj = helpers.create_pvc(sc_name=self.sc_obj.name,
                                         size=pvc_size)
            helpers.wait_for_resource_state(pvc_obj, constants.STATUS_BOUND)
            pvc_obj.reload()

            creation_time = performance_lib.measure_pvc_creation_time(
                self.interface, pvc_obj.name, start_time)

            logging.info(
                f"{msg_prefix} PVC number {i + 1} was created in {creation_time} seconds."
            )
            if creation_time > accepted_creation_time:
                raise ex.PerformanceException(
                    f"{msg_prefix} PVC creation time is {creation_time} and is greater than "
                    f"{accepted_creation_time} seconds.")
            creation_time_measures.append(creation_time)

            pv_name = pvc_obj.backed_pv
            pvc_reclaim_policy = pvc_obj.reclaim_policy

            pod_obj = self.write_file_on_pvc(pvc_obj)
            pod_obj.delete(wait=True)
            teardown_factory(pvc_obj)
            logging.info(f"{msg_prefix} Start deleting PVC number {i + 1}")
            if pvc_reclaim_policy == constants.RECLAIM_POLICY_DELETE:
                pvc_obj.delete()
                pvc_obj.ocp.wait_for_delete(pvc_obj.name)
                helpers.validate_pv_delete(pvc_obj.backed_pv)
                deletion_time = helpers.measure_pvc_deletion_time(
                    self.interface, pv_name)
                logging.info(
                    f"{msg_prefix} PVC number {i + 1} was deleted in {deletion_time} seconds."
                )
                if deletion_time > accepted_deletion_time:
                    raise ex.PerformanceException(
                        f"{msg_prefix} PVC deletion time is {deletion_time} and is greater than "
                        f"{accepted_deletion_time} seconds.")
                deletion_time_measures.append(deletion_time)
            else:
                logging.info(
                    f"Reclaim policy of the PVC {pvc_obj.name} is not Delete;"
                    f" therefore not measuring deletion time for this PVC.")

        creation_average = self.process_time_measurements(
            "creation",
            creation_time_measures,
            accepted_creation_deviation_percent,
            msg_prefix,
        )
        self.full_results.add_key("creation-time", creation_average)
        deletion_average = self.process_time_measurements(
            "deletion",
            deletion_time_measures,
            accepted_deletion_deviation_percent,
            msg_prefix,
        )
        self.full_results.add_key("deletion-time", deletion_average)
        self.full_results.all_results["creation"] = creation_time_measures
        self.full_results.all_results["deletion"] = deletion_time_measures
        self.end_time = time.strftime("%Y-%m-%dT%H:%M:%SGMT", time.gmtime())
        self.full_results.add_key("test_time", {
            "start": self.start_time,
            "end": self.end_time
        })
        self.full_results.es_write()
        log.info(
            f"The Result can be found at : {self.full_results.results_link()}")

        if not self.dev_mode:
            # all the results are OK, the test passes, push the results to the codespeed
            push_to_pvc_time_dashboard(self.interface, "1-pvc-creation",
                                       creation_average)
            push_to_pvc_time_dashboard(self.interface, "1-pvc-deletion",
                                       deletion_average)
    def test_bulk_pvc_creation_after_deletion_performance(
            self, teardown_factory):
        """
        Measuring PVC creation time of bulk of 75% of initial PVC bulk (120) in the same
        rate after deleting ( serial deletion) 75% of the initial PVCs
        and sends results to the Elastic Search DB

        Args:
            teardown_factory: A fixture used when we want a new resource that was created during the tests
                               to be removed in the teardown phase.
        Returns:

        """
        initial_number_of_pvcs = 120
        number_of_pvcs = math.ceil(initial_number_of_pvcs * 0.75)

        log.info(f"Start creating new {initial_number_of_pvcs} PVCs in a bulk")
        pvc_objs, _ = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=self.namespace,
            number_of_pvc=initial_number_of_pvcs,
            size=self.pvc_size,
            burst=True,
        )
        for pvc_obj in pvc_objs:
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor() as executor:
            for pvc_obj in pvc_objs:
                executor.submit(helpers.wait_for_resource_state, pvc_obj,
                                constants.STATUS_BOUND)

                executor.submit(pvc_obj.reload)
        log.info("Deleting 75% of the PVCs - 90 PVCs")
        assert pvc.delete_pvcs(pvc_objs[:number_of_pvcs],
                               True), "Deletion of 75% of PVCs failed"
        log.info("Re-creating the 90 PVCs")
        pvc_objs, _ = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=self.namespace,
            number_of_pvc=number_of_pvcs,
            size=self.pvc_size,
            burst=True,
        )
        start_time = helpers.get_provision_time(self.interface,
                                                pvc_objs,
                                                status="start")
        end_time = helpers.get_provision_time(self.interface,
                                              pvc_objs,
                                              status="end")
        total = end_time - start_time
        total_time = total.total_seconds()
        logging.info(
            f"Creation after deletion time of {number_of_pvcs} PVCs is {total_time} seconds."
        )

        for pvc_obj in pvc_objs:
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor() as executor:
            for pvc_obj in pvc_objs:
                executor.submit(helpers.wait_for_resource_state, pvc_obj,
                                constants.STATUS_BOUND)

                executor.submit(pvc_obj.reload)
        if total_time > 50:
            raise ex.PerformanceException(
                f"{number_of_pvcs} PVCs creation (after initial deletion of "
                f"75% of PVCs) time is {total_time} and greater than 50 seconds."
            )
        logging.info(
            f"{number_of_pvcs} PVCs creation time took less than 50 seconds")

        # Produce ES report
        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "bulk_pvc_creation_after_deletion_measurement",
            ))

        full_results.add_key("interface", self.interface)
        full_results.add_key("number_of_pvcs", number_of_pvcs)
        full_results.add_key("pvc_size", self.pvc_size)
        full_results.add_key("creation_after_deletion_time", total_time)

        # Write the test results into the ES server
        full_results.es_write()
    def test_pvc_creation_deletion_measurement_performance(
            self, interface_type, pvc_size):
        """
        Measuring PVC creation and deletion times for pvc samples.
        filling up each PVC with 70% of data.
        Verifying that those times are within the required limits

        Args:
            interface_type (str): the interface type to run against -
                CephBlockPool or CephFileSystem
            pvc_size (str): the size of the pvc to create
        """

        # Initializing test variables
        self.interface = interface_type

        num_of_samples = 5
        if self.dev_mode:
            num_of_samples = 2

        accepted_creation_time = 1
        accepted_deletion_time = Interface_Info[self.interface]["delete_time"]
        accepted_creation_deviation_percent = 50
        accepted_deletion_deviation_percent = 50

        all_measuring_times = {
            "create": [],
            "delete": [],
            "csi_create": [],
            "csi_delete": [],
        }

        msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."

        self.set_results_path_and_file(
            "test_pvc_creation_deletion_measurement_performance")

        self.start_time = self.get_time()

        self.get_env_info()

        # Initialize the results doc file.
        self.full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "pvc_create_delete_fullres",
            ))
        self.full_results.add_key("pvc_size", pvc_size)
        self.full_results.add_key("samples", num_of_samples)

        self.create_fio_pod_yaml(pvc_size=int(pvc_size.replace("Gi", "")))

        # Creating PVC(s) for creation time measurement
        start_time = self.create_pvcs_and_wait_for_bound(msg_prefix,
                                                         num_of_samples,
                                                         pvc_size,
                                                         burst=False)

        # Fill up the PVC with data (70% of the total PVC size)
        self.run_io()

        # Deleting PVC(s) for deletion time measurement
        log.info("Try to delete all created PVCs")
        for pvc_obj in self.pvc_objs:
            pvc_obj.delete()

        log.info("Wait for all PVC(s) to be deleted")
        performance_lib.wait_for_resource_bulk_status("pvc", 0, self.namespace,
                                                      constants.STATUS_BOUND,
                                                      num_of_samples * 2, 5)
        log.info("All PVC(s) was deleted")

        mesure_data = "create"
        rec_policy = performance_lib.run_oc_command(
            f'get sc {Interface_Info[self.interface]["sc"]} -o jsonpath="' +
            '{.reclaimPolicy}"')

        if rec_policy[0].strip('"') == constants.RECLAIM_POLICY_DELETE:
            log.info("Wait for all PVC(s) backed PV(s) to be deleted")
            # Timeout for each PV to be deleted is 20 sec.
            performance_lib.wait_for_resource_bulk_status(
                "pv", 0, self.namespace, self.namespace, num_of_samples * 20,
                5)
            log.info("All backed PV(s) was deleted")
            mesure_data = "all"

        # Measuring the time it took to create and delete the PVC(s)
        log.info("Reading Creation/Deletion time from provisioner logs")
        self.results_times = performance_lib.get_pvc_provision_times(
            interface=self.interface,
            pvc_name=self.pvc_objs,
            start_time=start_time,
            time_type="all",
            op=measure_data,
        )

        # Analyzing the test results
        for i, pvc_res in enumerate(self.results_times):
            data = self.results_times[pvc_res]
            msg = f"{msg_prefix} PVC number {i + 1} was"
            for op in Operations_Mesurment:
                log.info(f"{msg} {op}d in {data[op]['time']} seconds.")

            if data["create"]["time"] > accepted_creation_time:
                raise ex.PerformanceException(
                    f"{msg_prefix} PVC creation time is {data['create']['time']} and is greater than "
                    f"{accepted_creation_time} seconds.")

            if rec_policy == constants.RECLAIM_POLICY_DELETE:
                if data["delete"]["time"] > accepted_deletion_time:
                    raise ex.PerformanceException(
                        f"{msg_prefix} PVC deletion time is {data['delete']['time']} and is greater than "
                        f"{accepted_deletion_time} seconds.")
                all_mesuring_times["delete"].append(data["delete"]["time"])
                all_mesuring_times["csi_delete"].append(
                    data["csi_delete"]["time"])

            all_mesuring_times["create"].append(data["create"]["time"])
            all_mesuring_times["csi_create"].append(data["csi_create"]["time"])

        for op in Operations_Mesurment:
            if rec_policy == constants.RECLAIM_POLICY_DELETE and "del" in op:
                self.process_time_measurements(
                    op,
                    all_measuring_times[op],
                    accepted_deletion_deviation_percent,
                    msg_prefix,
                )
            if "create" in op:
                self.process_time_measurements(
                    op,
                    all_measuring_times[op],
                    accepted_creation_deviation_percent,
                    msg_prefix,
                )

        self.full_results.all_results = self.results_times
        self.end_time = self.get_time()
        self.full_results.add_key("test_time", {
            "start": self.start_time,
            "end": self.end_time
        })
        if self.full_results.es_write():
            res_link = self.full_results.results_link()
            log.info(f"The Result can be found at : {res_link}")

            # Create text file with results of all subtest (6 - according to the parameters)
            self.write_result_to_file(res_link)
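The reclaim-policy lookup in this test goes through the project's performance_lib wrapper around oc. For reference, a standalone sketch of the same jsonpath query shelling out to oc directly; the storage class name is an illustrative placeholder and the command assumes an authenticated oc client on PATH:

import subprocess

sc_name = "ocs-storagecluster-ceph-rbd"  # example storage class name, adjust as needed
rec_policy = subprocess.run(
    ["oc", "get", "sc", sc_name, "-o", "jsonpath={.reclaimPolicy}"],
    capture_output=True, text=True, check=True,
).stdout.strip()
print(rec_policy)  # e.g. "Delete" or "Retain"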
    def test_multiple_pvc_deletion_measurement_performance(
            self, interface_type):
        """
        Measuring PVC deletion time of 120 PVCs in 180 seconds

        Args:
            interface_type: the interface type which the test runs with - RBD / CephFS.

        """
        # Initialize the test variables
        self.interface = interface_type

        number_of_pvcs = 120
        if self.dev_mode:
            number_of_pvcs = 5

        pvc_size = "1Gi"

        # accepted deletion time is 2 secs for each PVC
        accepted_pvc_deletion_time = number_of_pvcs * 2

        msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."
        self.set_results_path_and_file(
            "test_multiple_pvc_deletion_measurement_performance")
        bulk_data = {
            op: {"start": [], "end": []}
            for op in ("create", "csi_create", "delete", "csi_delete")
        }
        bulk_times = {
            "create": None,
            "delete": None,
            "csi_create": None,
            "csi_delete": None,
        }

        self.start_time = self.get_time()

        self.get_env_info()

        # Initialize the results doc file.
        self.full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "pvc_bulk_deletion_fullres",
            ))
        self.full_results.add_key("bulk_size", number_of_pvcs)
        self.full_results.add_key("pvc_size", pvc_size)

        self.create_fio_pod_yaml(pvc_size=int(pvc_size.replace("Gi", "")))

        # Creating PVC(s) for creation time measurement and waiting for bound state
        start_time = self.create_pvcs_and_wait_for_bound(msg_prefix,
                                                         number_of_pvcs,
                                                         pvc_size,
                                                         burst=True)

        # Fill up the PVCs with data (70% of the total PVC size)
        self.run_io()

        # Delete the PVC(s) for deletion time measurement
        log.info("Try to delete all created PVCs")
        for pvc_obj in self.pvc_objs:
            pvc_obj.delete(wait=False)

        performance_lib.wait_for_resource_bulk_status("pvc", 0, self.namespace,
                                                      constants.STATUS_BOUND,
                                                      number_of_pvcs * 2, 5)
        log.info("All PVC(s) was deleted")

        log.info("Wait for all PVC(s) backed PV(s) to be deleted")
        # Timeout for each PV to be deleted is 20 sec.
        performance_lib.wait_for_resource_bulk_status("pv", 0, self.namespace,
                                                      self.namespace,
                                                      number_of_pvcs * 20, 5)
        log.info("All backed PV(s) was deleted")

        # Measure the time it took to delete the PVC(s)
        log.info("Reading Creation/Deletion time from provisioner logs")
        self.results_times = performance_lib.get_pvc_provision_times(
            interface=self.interface,
            pvc_name=self.pvc_objs,
            start_time=start_time,
            time_type="all",
            op="all",
        )
        for i, pvc_res in enumerate(self.results_times):
            data = self.results_times[pvc_res]
            msg = f"{msg_prefix} PVC number {i + 1} was"
            for op in Operations_Mesurment:
                log.info(f"{msg} {op}d in {data[op]['time']} seconds.")

                bulk_data[op]["start"].append(data[op]["start"])
                bulk_data[op]["end"].append(data[op]["end"])

            if data["delete"]["time"] > accepted_pvc_deletion_time:
                raise ex.PerformanceException(
                    f"{msg_prefix} {number_of_pvcs} PVCs deletion time is {data['delete']['time']} "
                    f"and is greater than {accepted_pvc_deletion_time} seconds"
                )

        for op in Operations_Mesurment:
            bulk_times[op] = {
                "start": sorted(bulk_data[op]["start"])[0],
                "end": sorted(bulk_data[op]["end"])[-1],
                "time": None,
            }
            bulk_times[op]["time"] = performance_lib.calculate_operation_time(
                f"bulk_{op}", bulk_times[op])

            log.info(
                f"Bulk {op} time is: {bulk_times[op]['time']} seconds")
            self.full_results.add_key(f"multi_{op}", bulk_times[op]["time"])

        self.full_results.all_results = self.results_times
        self.end_time = self.get_time()
        self.full_results.add_key("test_time", {
            "start": self.start_time,
            "end": self.end_time
        })

        if self.full_results.es_write():
            res_link = self.full_results.results_link()
            log.info(f"The Result can be found at : {res_link}")

            # Create a text file with the results of all subtests (3 - according to the parameters)
            self.write_result_to_file(res_link)

    def test_bulk_pvc_creation_deletion_measurement_performance(
            self, teardown_factory, bulk_size):
        """
        Measuring PVC creation and deletion time of bulk_size PVCs
        and sends results to the Elastic Search DB

        Args:
            teardown_factory: A fixture used when we want a new resource that was created during the tests
                               to be removed in the teardown phase.
            bulk_size: Size of the bulk to be tested
        Returns:

        """
        bulk_creation_time_limit = bulk_size / 2
        log.info(f"Start creating new {bulk_size} PVCs")

        pvc_objs, yaml_creation_dir = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=self.namespace,
            number_of_pvc=bulk_size,
            size=self.pvc_size,
            burst=True,
        )
        logging.info(f"PVC creation dir is {yaml_creation_dir}")

        for pvc_obj in pvc_objs:
            pvc_obj.reload()
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor(max_workers=5) as executor:
            for pvc_obj in pvc_objs:
                executor.submit(helpers.wait_for_resource_state, pvc_obj,
                                constants.STATUS_BOUND)
                executor.submit(pvc_obj.reload)

        start_time = helpers.get_provision_time(self.interface,
                                                pvc_objs,
                                                status="start")
        end_time = helpers.get_provision_time(self.interface,
                                              pvc_objs,
                                              status="end")
        total_time = (end_time - start_time).total_seconds()
        logging.info(
            f"{bulk_size} Bulk PVCs creation time is {total_time} seconds.")

        if total_time > bulk_creation_time_limit:
            raise ex.PerformanceException(
                f"{bulk_size} Bulk PVCs creation time is {total_time} and "
                f"greater than {bulk_creation_time_limit} seconds")

        pv_names_list = []
        for pvc_obj in pvc_objs:
            pv_names_list.append(pvc_obj.backed_pv)

        logging.info(f"Starting to delete bulk of {bulk_size} PVCs")
        helpers.delete_bulk_pvcs(yaml_creation_dir,
                                 pv_names_list,
                                 namespace=self.namespace)
        logging.info(
            f"Deletion of bulk of {bulk_size} PVCs successfully completed")

        log_deletion_times = helpers.measure_pv_deletion_time_bulk(
            self.interface, pv_names_list, return_log_times=True)

        all_start_times = [
            a_tuple[0] for a_tuple in log_deletion_times.values()
        ]
        bulk_start_time = sorted(all_start_times)[0]  # the earliest start time
        start_deletion_time = datetime.datetime.strptime(
            bulk_start_time, helpers.DATE_TIME_FORMAT)

        all_end_times = [a_tuple[1] for a_tuple in log_deletion_times.values()]
        bulk_deletion_time = sorted(all_end_times)[-1]  # the latest end time
        end_deletion_time = datetime.datetime.strptime(
            bulk_deletion_time, helpers.DATE_TIME_FORMAT)

        total_deletion_time = (end_deletion_time -
                               start_deletion_time).total_seconds()
        logging.info(
            f"{bulk_size} Bulk PVCs deletion time is {total_deletion_time} seconds."
        )

        # Produce ES report
        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "bulk_creation_deletion_measurement",
            ))

        full_results.add_key("interface", self.interface)
        full_results.add_key("bulk_size", bulk_size)
        full_results.add_key("pvc_size", self.pvc_size)
        full_results.add_key("bulk_pvc_creation_time", total_time)
        full_results.add_key("bulk_pvc_deletion_time", total_deletion_time)

        # Write the test results into the ES server
        full_results.es_write()
Example No. 22
0
    def test_pvc_reattach_time_performance(self, pvc_factory,
                                           teardown_factory):
        """
        Test assigning a nodeName to a pod using an RWX PVC
        Performance in test_multiple_pvc_creation_measurement_performance
        Each kernel (unzipped) is 892M and 61694 files
        """

        kernel_url = "https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-4.19.5.tar.gz"
        download_path = "tmp"
        # Number of times we copy the kernel
        copies = 3

        # Download a Linux kernel
        import os

        dir_path = os.path.join(os.getcwd(), download_path)
        file_path = os.path.join(dir_path, "file.gz")
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        urllib.request.urlretrieve(kernel_url, file_path)

        worker_nodes_list = node.get_worker_nodes()
        assert len(worker_nodes_list) > 1
        node_one = worker_nodes_list[0]
        node_two = worker_nodes_list[1]

        # Create a PVC
        accessmode = constants.ACCESS_MODE_RWX
        if self.interface == constants.CEPHBLOCKPOOL:
            accessmode = constants.ACCESS_MODE_RWO
        pvc_obj = pvc_factory(
            interface=self.interface,
            access_mode=accessmode,
            status=constants.STATUS_BOUND,
            size="15",
        )

        # Create a pod on one node
        logging.info(
            f"Creating Pod with pvc {pvc_obj.name} on node {node_one}")

        helpers.pull_images(constants.PERF_IMAGE)
        pod_obj1 = helpers.create_pod(
            interface_type=self.interface,
            pvc_name=pvc_obj.name,
            namespace=pvc_obj.namespace,
            node_name=node_one,
            pod_dict_path=constants.PERF_POD_YAML,
        )

        # Confirm that the pod is running on the selected node
        logging.info("Checking whether the pod is running on the selected node")
        helpers.wait_for_resource_state(resource=pod_obj1,
                                        state=constants.STATUS_RUNNING,
                                        timeout=120)

        pod_name = pod_obj1.name
        pod_path = "/mnt"

        _ocp = OCP(namespace=pvc_obj.namespace)

        rsh_cmd = f"rsync {dir_path} {pod_name}:{pod_path}"
        _ocp.exec_oc_cmd(rsh_cmd)

        rsh_cmd = f"exec {pod_name} -- tar xvf {pod_path}/tmp/file.gz -C {pod_path}/tmp"
        _ocp.exec_oc_cmd(rsh_cmd)

        for x in range(copies):
            rsh_cmd = f"exec {pod_name} -- mkdir -p {pod_path}/folder{x}"
            _ocp.exec_oc_cmd(rsh_cmd)
            rsh_cmd = f"exec {pod_name} -- cp -r {pod_path}/tmp {pod_path}/folder{x}"
            _ocp.exec_oc_cmd(rsh_cmd)
            rsh_cmd = f"exec {pod_name} -- sync"
            _ocp.exec_oc_cmd(rsh_cmd)

        log.info("Getting the amount of data written to the PVC")
        rsh_cmd = f"exec {pod_name} -- df -h {pod_path}"
        data_written = _ocp.exec_oc_cmd(rsh_cmd).split()[-4]
        log.info(
            f"The amount of data that was written to the pod is {data_written}"
        )
        rsh_cmd = f"delete pod {pod_name}"
        _ocp.exec_oc_cmd(rsh_cmd)

        logging.info(
            f"Creating Pod with pvc {pvc_obj.name} on node {node_two}")

        pod_obj2 = helpers.create_pod(
            interface_type=self.interface,
            pvc_name=pvc_obj.name,
            namespace=pvc_obj.namespace,
            node_name=node_two,
            pod_dict_path=constants.PERF_POD_YAML,
        )

        start_time = time.time()

        pod_name = pod_obj2.name
        helpers.wait_for_resource_state(resource=pod_obj2,
                                        state=constants.STATUS_RUNNING,
                                        timeout=120)
        end_time = time.time()
        total_time = end_time - start_time
        if total_time > 60:
            raise ex.PerformanceException(
                f"Pod creation time is {total_time} and greater than 60 seconds"
            )
        logging.info(f"Pod {pod_name} creation time took {total_time} seconds")

        teardown_factory(pod_obj2)
        os.remove(file_path)
        os.rmdir(dir_path)
Example No. 23
0
    def test_pod_reattach_time_performance(
        self, storageclass_factory, copies, timeout, total_time_limit
    ):
        """
        Test assigning a nodeName to a pod using an RWX PVC
        Each kernel (unzipped) is 892M and 61694 files
        The test creates samples_num PVCs and pods, writes the kernel files multiplied
        by the number of copies, and calculates the average total and csi reattach
        times and their standard deviation
        """
        kernel_url = "https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-4.19.5.tar.gz"
        download_path = "tmp"

        samples_num = 7
        if self.dev_mode:
            samples_num = 3

        test_start_time = PASTest.get_time()
        helpers.pull_images(constants.PERF_IMAGE)
        # Download a linux Kernel

        dir_path = os.path.join(os.getcwd(), download_path)
        file_path = os.path.join(dir_path, "file.gz")
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        urllib.request.urlretrieve(kernel_url, file_path)

        worker_nodes_list = node.get_worker_nodes()
        assert len(worker_nodes_list) > 1
        node_one = worker_nodes_list[0]
        node_two = worker_nodes_list[1]

        time_measures, csi_time_measures, files_written_list, data_written_list = (
            [],
            [],
            [],
            [],
        )

        self.sc_obj = storageclass_factory(self.interface)
        for sample_index in range(1, samples_num + 1):

            csi_start_time = self.get_time("csi")

            logger.info(f"Start creating PVC number {sample_index}.")
            pvc_obj = helpers.create_pvc(
                sc_name=self.sc_obj.name, size="100Gi", namespace=self.namespace
            )
            helpers.wait_for_resource_state(pvc_obj, constants.STATUS_BOUND)

            # Create a pod on one node
            logger.info(f"Creating Pod with pvc {pvc_obj.name} on node {node_one}")

            pvc_obj.reload()
            self.pvc_list.append(pvc_obj)

            try:
                pod_obj1 = helpers.create_pod(
                    interface_type=self.interface,
                    pvc_name=pvc_obj.name,
                    namespace=pvc_obj.namespace,
                    node_name=node_one,
                    pod_dict_path=constants.PERF_POD_YAML,
                )
            except Exception as e:
                logger.error(
                    f"Pod on PVC {pvc_obj.name} was not created, exception {str(e)}"
                )
                raise PodNotCreated("Pod on PVC was not created.")

            # Confirm that the pod is running on the selected node
            logger.info("Checking whether the pod is running on the selected node")
            helpers.wait_for_resource_state(
                resource=pod_obj1, state=constants.STATUS_RUNNING, timeout=timeout
            )

            pod_name = pod_obj1.name
            pod_path = "/mnt"

            _ocp = OCP(namespace=pvc_obj.namespace)

            rsh_cmd = f"rsync {dir_path} {pod_name}:{pod_path}"
            _ocp.exec_oc_cmd(rsh_cmd)

            rsh_cmd = (
                f"exec {pod_name} -- tar xvf {pod_path}/tmp/file.gz -C {pod_path}/tmp"
            )
            _ocp.exec_oc_cmd(rsh_cmd)

            for x in range(copies):
                rsh_cmd = f"exec {pod_name} -- mkdir -p {pod_path}/folder{x}"
                _ocp.exec_oc_cmd(rsh_cmd)
                rsh_cmd = (
                    f"exec {pod_name} -- cp -r {pod_path}/tmp {pod_path}/folder{x}"
                )
                _ocp.exec_oc_cmd(rsh_cmd)
                rsh_cmd = f"exec {pod_name} -- sync"
                _ocp.exec_oc_cmd(rsh_cmd)

            logger.info("Getting the amount of data written to the PVC")
            rsh_cmd = f"exec {pod_name} -- df -h {pod_path}"
            data_written_str = _ocp.exec_oc_cmd(rsh_cmd).split()[-4]
            logger.info(f"The amount of written data is {data_written_str}")
            data_written = float(data_written_str[:-1])

            rsh_cmd = f"exec {pod_name} -- find {pod_path} -type f"
            files_written = len(_ocp.exec_oc_cmd(rsh_cmd).split())
            logger.info(
                f"For {self.interface} - The number of files written to the pod is {files_written}"
            )
            files_written_list.append(files_written)
            data_written_list.append(data_written)

            logger.info("Deleting the pod")
            rsh_cmd = f"delete pod {pod_name}"
            _ocp.exec_oc_cmd(rsh_cmd)

            logger.info(f"Creating Pod with pvc {pvc_obj.name} on node {node_two}")

            try:
                pod_obj2 = helpers.create_pod(
                    interface_type=self.interface,
                    pvc_name=pvc_obj.name,
                    namespace=pvc_obj.namespace,
                    node_name=node_two,
                    pod_dict_path=constants.PERF_POD_YAML,
                )
            except Exception as e:
                logger.error(
                    f"Pod on PVC {pvc_obj.name} was not created, exception {str(e)}"
                )
                raise PodNotCreated("Pod on PVC was not created.")

            start_time = time.time()

            pod_name = pod_obj2.name
            helpers.wait_for_resource_state(
                resource=pod_obj2, state=constants.STATUS_RUNNING, timeout=timeout
            )
            end_time = time.time()
            total_time = end_time - start_time
            if total_time > total_time_limit:
                logger.error(
                    f"Pod creation time is {total_time} and greater than {total_time_limit} seconds"
                )
                raise ex.PerformanceException(
                    f"Pod creation time is {total_time} and greater than {total_time_limit} seconds"
                )

            csi_time = performance_lib.pod_attach_csi_time(
                self.interface, pvc_obj.backed_pv, csi_start_time, pvc_obj.namespace
            )[0]
            csi_time_measures.append(csi_time)
            logger.info(
                f"PVC #{pvc_obj.name} pod {pod_name} creation time took {total_time} seconds, "
                f"csi time is {csi_time} seconds"
            )
            time_measures.append(total_time)

            logger.info("Deleting the pod")
            rsh_cmd = f"delete pod {pod_name}"
            _ocp.exec_oc_cmd(rsh_cmd)
            # teardown_factory(pod_obj2)

        average = statistics.mean(time_measures)
        logger.info(
            f"The average time of {self.interface} pod creation on {samples_num} PVCs is {average} seconds"
        )

        st_deviation = statistics.stdev(time_measures)
        logger.info(
            f"The standard deviation of {self.interface} pod creation time on {samples_num} PVCs is {st_deviation}"
        )

        csi_average = statistics.mean(csi_time_measures)
        logger.info(
            f"The average csi time of {self.interface} pod creation on {samples_num} PVCs is {csi_average} seconds"
        )

        csi_st_deviation = statistics.stdev(csi_time_measures)
        logger.info(
            f"The standard deviation of {self.interface} csi pod creation time on {samples_num} PVCs "
            f"is {csi_st_deviation}"
        )

        files_written_average = statistics.mean(files_written_list)
        data_written_average = statistics.mean(data_written_list)

        os.remove(file_path)
        os.rmdir(dir_path)

        # Produce ES report

        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "pod_reattach_time_fullres",
            )
        )

        full_results.add_key("storageclass", self.sc)
        full_results.add_key("pod_reattach_time", time_measures)
        full_results.add_key("copies_number", copies)
        full_results.add_key("files_number_average", files_written_average)
        full_results.add_key("data_average", data_written_average)
        full_results.add_key("pod_reattach_time_average", average)
        full_results.add_key("pod_reattach_standard_deviation", st_deviation)
        full_results.add_key("pod_csi_reattach_time_average", csi_average)
        full_results.add_key("pod_csi_reattach_standard_deviation", csi_st_deviation)

        test_end_time = PASTest.get_time()

        # Add the test time to the ES report
        full_results.add_key(
            "test_time", {"start": test_start_time, "end": test_end_time}
        )

        # Write the test results into the ES server
        if full_results.es_write():
            res_link = full_results.results_link()
            logger.info(f"The Result can be found at : {res_link}")

            # Create a text file with the results of all subtests (4 - according to the parameters)
            self.results_path = get_full_test_logs_path(
                cname=self, fname="test_pod_reattach_time_performance"
            )
            self.write_result_to_file(res_link)

    def test_bulk_pvc_creation_deletion_measurement_performance(
        self, storageclass_factory, interface_type, bulk_size
    ):
        """
        Measuring PVC creation and deletion time of bulk_size PVCs
        and sends results to the Elastic Search DB

        Args:
            storageclass_factory: A fixture to create a new storage class
            interface_type: the interface type the test runs with - RBD / CephFS
            bulk_size: Size of the bulk to be tested
        Returns:

        """
        self.interface = interface_type
        self.sc_obj = storageclass_factory(self.interface)

        bulk_creation_time_limit = bulk_size / 2

        log.info(f"Start creating new {bulk_size} PVCs")

        # Getting the start time of the test.
        self.test_start_time = self.get_time()

        # Run the Bulk Creation test
        csi_bulk_start_time = self.get_time(time_format="csi")
        self.pvc_bulk_create_and_wait_for_bound(bulk_size)
        log.info(f"PVC creation dir is {self.yaml_creation_dir}")

        total_time = self.get_bulk_creation_time()
        log.info(f"{bulk_size} Bulk PVCs creation time is {total_time} seconds.")
        csi_creation_times = performance_lib.csi_bulk_pvc_time_measure(
            self.interface, self.pvc_objs, "create", csi_bulk_start_time
        )

        if total_time > bulk_creation_time_limit:
            raise ex.PerformanceException(
                f"{bulk_size} Bulk PVCs creation time is {total_time} and "
                f"greater than {bulk_creation_time_limit} seconds"
            )

        # Run the Bulk Deletion test
        pv_names_list = []
        for pvc_obj in self.pvc_objs:
            pv_names_list.append(pvc_obj.backed_pv)

        log.info(f"Starting to delete bulk of {bulk_size} PVCs")
        helpers.delete_bulk_pvcs(
            self.yaml_creation_dir, pv_names_list, namespace=self.namespace
        )
        log.info(f"Deletion of bulk of {bulk_size} PVCs successfully completed")

        log_deletion_times = helpers.measure_pv_deletion_time_bulk(
            self.interface, pv_names_list, return_log_times=True
        )

        all_start_times = [a_tuple[0] for a_tuple in log_deletion_times.values()]
        bulk_start_time = sorted(all_start_times)[0]  # the earliest start time
        start_deletion_time = datetime.datetime.strptime(
            bulk_start_time, helpers.DATE_TIME_FORMAT
        )

        all_end_times = [a_tuple[1] for a_tuple in log_deletion_times.values()]
        bulk_deletion_time = sorted(all_end_times)[-1]  # the latest end time
        end_deletion_time = datetime.datetime.strptime(
            bulk_deletion_time, helpers.DATE_TIME_FORMAT
        )

        total_deletion_time = (end_deletion_time - start_deletion_time).total_seconds()
        log.info(
            f"{bulk_size} Bulk PVCs deletion time is {total_deletion_time} seconds."
        )

        csi_deletion_times = performance_lib.csi_bulk_pvc_time_measure(
            self.interface, self.pvc_objs, "delete", csi_bulk_start_time
        )
        # Getting the end time of the test
        self.test_end_time = self.get_time()

        # Reset the list of PVCs since they were deleted and do not need to be
        # deleted in the teardown phase.
        self.pvc_objs = []

        # Produce ES report
        self.results_path = os.path.join(
            "/",
            *self.results_path,
            "test_bulk_pvc_creation_deletion_measurement_performance",
        )

        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "bulk_creation_deletion_measurement",
            )
        )

        # Add the test time to the ES report
        full_results.add_key(
            "test_time", {"start": self.test_start_time, "end": self.test_end_time}
        )
        full_results.add_key("bulk_size", bulk_size)
        full_results.add_key("bulk_pvc_creation_time", total_time)
        full_results.add_key("bulk_pvc_csi_creation_time", csi_creation_times)
        full_results.add_key("bulk_pvc_deletion_time", total_deletion_time)
        full_results.add_key("bulk_pvc_csi_deletion_time", csi_deletion_times)

        # Write the test results into the ES server
        if full_results.es_write():
            res_link = full_results.results_link()
            log.info(f"The Result can be found at : {res_link}")

            # Create a text file with the results of all subtests (4 - according to the parameters)
            self.write_result_to_file(res_link)

    def test_multiple_pvc_deletion_measurement_performance(
            self, teardown_factory):
        """
        Measuring PVC deletion time of 120 PVCs in 180 seconds

        Args:
            teardown_factory: A fixture used when we want a new resource that was created during the tests
                               to be removed in the teardown phase.
        Returns:

        """
        number_of_pvcs = 120
        pvc_size = "1Gi"
        msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."

        log.info(f"{msg_prefix} Start creating new 120 PVCs")

        pvc_objs, _ = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=number_of_pvcs,
            size=pvc_size,
            burst=True,
        )

        for pvc_obj in pvc_objs:
            pvc_obj.reload()
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor(max_workers=5) as executor:
            for pvc_obj in pvc_objs:
                executor.submit(helpers.wait_for_resource_state, pvc_obj,
                                constants.STATUS_BOUND)

                executor.submit(pvc_obj.reload)

        pod_objs = []
        for pvc_obj in pvc_objs:
            pod_obj = self.write_file_on_pvc(pvc_obj, 0.3)
            pod_objs.append(pod_obj)

        # Reload each PVC; the PVC names are required to fetch deletion time data from the logs
        threads = list()
        for pvc_obj in pvc_objs:
            process = threading.Thread(target=pvc_obj.reload)
            process.start()
            threads.append(process)
        for process in threads:
            process.join()

        # Collect the PVC and backing PV names needed to parse the deletion
        # times from the provisioner logs.
        pvc_name_list, pv_name_list = ([] for i in range(2))
        for pvc_obj in pvc_objs:
            pvc_name_list.append(pvc_obj.name)
            pv_name_list.append(pvc_obj.backed_pv)
        log.info(f"{msg_prefix} Preparing to delete 120 PVCs")

        # Delete PVC
        for pvc_obj, pod_obj in zip(pvc_objs, pod_objs):
            pod_obj.delete(wait=True)
            pvc_obj.delete()
            pvc_obj.ocp.wait_for_delete(pvc_obj.name)

        # Get PVC deletion time
        pvc_deletion_time = helpers.measure_pv_deletion_time_bulk(
            interface=self.interface, pv_name_list=pv_name_list)
        log.info(
            f"{msg_prefix} {number_of_pvcs} bulk deletion time is {pvc_deletion_time}"
        )

        # accepted deletion time is 2 secs for each PVC
        accepted_pvc_deletion_time = number_of_pvcs * 2

        for del_time in pvc_deletion_time.values():
            if del_time > accepted_pvc_deletion_time:
                raise ex.PerformanceException(
                    f"{msg_prefix} {number_of_pvcs} PVCs deletion time is {del_time} and is "
                    f"greater than {accepted_pvc_deletion_time} seconds")

        logging.info(f"{msg_prefix} {number_of_pvcs} PVCs deletion times are:")
        for name, a_time in pvc_deletion_time.items():
            logging.info(f"{name} deletion time is: {a_time} seconds")

    def test_bulk_pvc_creation_after_deletion_performance(
        self, interface_iterate, storageclass_factory
    ):
        """
        Measuring PVC creation time of a bulk of 75% of the initial PVC bulk (120) at the
        same rate, after deleting (serial deletion) 75% of the initial PVCs,
        and sends results to the Elastic Search DB

        """
        self.interface = interface_iterate
        self.sc_obj = storageclass_factory(self.interface)
        initial_number_of_pvcs = 120
        number_of_pvcs = math.ceil(initial_number_of_pvcs * 0.75)

        # Getting the test start time
        self.test_start_time = self.get_time()

        log.info(f"Start creating new {initial_number_of_pvcs} PVCs in a bulk")
        self.pvc_bulk_create_and_wait_for_bound(initial_number_of_pvcs)

        log.info(f"Deleting 75% of the PVCs - {number_of_pvcs} PVCs")
        assert pvc.delete_pvcs(
            self.pvc_objs[:number_of_pvcs], True
        ), "Deletion of 75% of PVCs failed"
        # Save the list of PVCs that were not deleted, for the teardown phase
        original_pvcs = self.pvc_objs[number_of_pvcs:]

        log.info(f"Re-creating the {number_of_pvcs} PVCs")
        csi_bulk_start_time = self.get_time(time_format="csi")
        self.pvc_bulk_create_and_wait_for_bound(number_of_pvcs)

        # Get the bulk re-creation time - total time.
        total_time = self.get_bulk_creation_time()
        log.info(
            f"Creation after deletion time of {number_of_pvcs} is {total_time} seconds."
        )

        if total_time > 50:
            raise ex.PerformanceException(
                f"{number_of_pvcs} PVCs creation (after initial deletion of "
                f"75% of PVCs) time is {total_time} and greater than 50 seconds."
            )
        log.info(f"{number_of_pvcs} PVCs creation time took less than a 50 seconds")

        csi_creation_times = performance_lib.csi_bulk_pvc_time_measure(
            self.interface, self.pvc_objs, "create", csi_bulk_start_time
        )
        # Getting the end time of the test
        self.test_end_time = self.get_time()

        # update the list of pvcs for the teardown process
        self.pvc_objs += original_pvcs

        # Produce ES report
        self.results_path = os.path.join(
            "/",
            *self.results_path,
            "test_bulk_pvc_creation_after_deletion_performance",
        )
        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "bulk_pvc_creation_after_deletion_measurement",
            )
        )

        # Add the test time to the ES report
        full_results.add_key(
            "test_time", {"start": self.test_start_time, "end": self.test_end_time}
        )

        full_results.add_key("number_of_pvcs", number_of_pvcs)
        full_results.add_key("creation_after_deletion_time", total_time)
        full_results.add_key("creation_after_deletion_csi_time", csi_creation_times)

        # Write the test results into the ES server
        if full_results.es_write():
            res_link = full_results.results_link()
            log.info(f"The Result can be found at : {res_link}")

            # Create a text file with the results of all subtests (2 - according to the parameters)
            self.write_result_to_file(res_link)