Example #1

def test_bulk_pvc_creation_deletion_measurement_performance(
        self, storageclass_factory, interface_type, bulk_size
    ):
        """
        Measure the creation and deletion times of bulk_size PVCs
        and send the results to the Elasticsearch DB

        Args:
            storageclass_factory: Fixture that creates a storage class for the given interface
            interface_type: The storage interface to test
            bulk_size: Size of the bulk to be tested
        Returns:

        """
        self.interface = interface_type
        self.sc_obj = storageclass_factory(self.interface)

        bulk_creation_time_limit = bulk_size / 2
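        # i.e. the acceptance criterion: at most half a second per PVC for the whole bulk.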

        log.info(f"Start creating new {bulk_size} PVCs")

        # Getting the start time of the test.
        self.test_start_time = self.get_time()

        # Run the Bulk Creation test
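        # CSI-formatted start timestamp, used to scope the CSI-driver log queries
        # for both the creation and the deletion measurements below.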
        csi_bulk_start_time = self.get_time(time_format="csi")
        self.pvc_bulk_create_and_wait_for_bound(bulk_size)
        log.info(f"PVC creation dir is {self.yaml_creation_dir}")

        total_time = self.get_bulk_creation_time()
        log.info(f"{bulk_size} Bulk PVCs creation time is {total_time} seconds.")
        csi_creation_times = performance_lib.csi_bulk_pvc_time_measure(
            self.interface, self.pvc_objs, "create", csi_bulk_start_time
        )

        if total_time > bulk_creation_time_limit:
            raise ex.PerformanceException(
                f"{bulk_size} Bulk PVCs creation time is {total_time} and "
                f"greater than {bulk_creation_time_limit} seconds"
            )

        # Run the Bulk Deletion test
        pv_names_list = []
        for pvc_obj in self.pvc_objs:
            pv_names_list.append(pvc_obj.backed_pv)

        log.info(f"Starting to delete bulk of {bulk_size} PVCs")
        helpers.delete_bulk_pvcs(
            self.yaml_creation_dir, pv_names_list, namespace=self.namespace
        )
        log.info(f"Deletion of bulk of {bulk_size} PVCs successfully completed")

        log_deletion_times = helpers.measure_pv_deletion_time_bulk(
            self.interface, pv_names_list, return_log_times=True
        )

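        # The bulk deletion window spans from the earliest per-PV deletion start
        # to the latest per-PV deletion end found in the logs.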
        all_start_times = [a_tuple[0] for a_tuple in log_deletion_times.values()]
        bulk_start_time = min(all_start_times)  # the earliest start time
        start_deletion_time = datetime.datetime.strptime(
            bulk_start_time, helpers.DATE_TIME_FORMAT
        )

        all_end_times = [a_tuple[1] for a_tuple in log_deletion_times.values()]
        bulk_deletion_time = max(all_end_times)  # the latest end time
        end_deletion_time = datetime.datetime.strptime(
            bulk_deletion_time, helpers.DATE_TIME_FORMAT
        )

        total_deletion_time = (end_deletion_time - start_deletion_time).total_seconds()
        log.info(
            f"{bulk_size} Bulk PVCs deletion time is {total_deletion_time} seconds."
        )

        csi_deletion_times = performance_lib.csi_bulk_pvc_time_measure(
            self.interface, self.pvc_objs, "delete", csi_bulk_start_time
        )
        # Getting the end time of the test
        self.test_end_time = self.get_time()

        # Reset the list of PVCs since they were already deleted and do not need
        # to be deleted in the teardown phase.
        self.pvc_objs = []

        # Produce ES report
        self.results_path = os.path.join(
            "/",
            *self.results_path,
            "test_bulk_pvc_creation_deletion_measurement_performance",
        )

        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "bulk_creation_deletion_measurement",
            )
        )

        # Add the test time to the ES report
        full_results.add_key(
            "test_time", {"start": self.test_start_time, "end": self.test_end_time}
        )
        full_results.add_key("bulk_size", bulk_size)
        full_results.add_key("bulk_pvc_creation_time", total_time)
        full_results.add_key("bulk_pvc_csi_creation_time", csi_creation_times)
        full_results.add_key("bulk_pvc_deletion_time", total_deletion_time)
        full_results.add_key("bulk_pvc_csi_deletion_time", csi_deletion_times)

        # Write the test results into the ES server
        if full_results.es_write():
            res_link = full_results.results_link()
            log.info(f"The Result can be found at : {res_link}")

            # Create text file with results of all subtest (4 - according to the parameters)
            self.write_result_to_file(res_link)

Example #2

    def test_bulk_pvc_creation_deletion_measurement_performance(
            self, teardown_factory, bulk_size):
        """
        Measure the creation and deletion times of bulk_size PVCs
        and send the results to the Elasticsearch DB

        Args:
            teardown_factory: A fixture that registers resources created during the test
                               for removal in the teardown phase.
            bulk_size: Size of the bulk to be tested
        Returns:

        """
        bulk_creation_time_limit = bulk_size / 2
        log.info(f"Start creating new {bulk_size} PVCs")

        pvc_objs, yaml_creation_dir = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=self.namespace,
            number_of_pvc=bulk_size,
            size=self.pvc_size,
            burst=True,
        )
        logging.info(f"PVC creation dir is {yaml_creation_dir}")

        for pvc_obj in pvc_objs:
            pvc_obj.reload()
            teardown_factory(pvc_obj)
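        # Wait in parallel (up to 5 workers) for all PVCs to reach the Bound state.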
        with ThreadPoolExecutor(max_workers=5) as executor:
            for pvc_obj in pvc_objs:
                executor.submit(helpers.wait_for_resource_state, pvc_obj,
                                constants.STATUS_BOUND)
                executor.submit(pvc_obj.reload)

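        # The bulk creation time spans from the first provisioning "start" event
        # to the last provisioning "end" event across all PVCs.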
        start_time = helpers.get_provision_time(self.interface,
                                                pvc_objs,
                                                status="start")
        end_time = helpers.get_provision_time(self.interface,
                                              pvc_objs,
                                              status="end")
        total_time = (end_time - start_time).total_seconds()
        log.info(f"{bulk_size} Bulk PVCs creation time is {total_time} seconds.")

        if total_time > bulk_creation_time_limit:
            raise ex.PerformanceException(
                f"{bulk_size} Bulk PVCs creation time is {total_time} seconds, "
                f"greater than the limit of {bulk_creation_time_limit} seconds")

        pv_names_list = []
        for pvc_obj in pvc_objs:
            pv_names_list.append(pvc_obj.backed_pv)

        logging.info(f"Starting to delete bulk of {bulk_size} PVCs")
        helpers.delete_bulk_pvcs(yaml_creation_dir,
                                 pv_names_list,
                                 namespace=self.namespace)
        log.info(f"Deletion of bulk of {bulk_size} PVCs successfully completed")

        log_deletion_times = helpers.measure_pv_deletion_time_bulk(
            self.interface, pv_names_list, return_log_times=True)

        all_start_times = [a_tuple[0] for a_tuple in log_deletion_times.values()]
        bulk_start_time = min(all_start_times)  # the earliest start time
        start_deletion_time = datetime.datetime.strptime(
            bulk_start_time, helpers.DATE_TIME_FORMAT)

        all_end_times = [a_tuple[1] for a_tuple in log_deletion_times.values()]
        bulk_deletion_time = max(all_end_times)  # the latest end time
        end_deletion_time = datetime.datetime.strptime(
            bulk_deletion_time, helpers.DATE_TIME_FORMAT)

        total_deletion_time = (end_deletion_time -
                               start_deletion_time).total_seconds()
        log.info(
            f"{bulk_size} Bulk PVCs deletion time is {total_deletion_time} seconds."
        )

        # Produce ES report
        # Collecting environment information
        self.get_env_info()

        # Initialize the results doc file.
        full_results = self.init_full_results(
            ResultsAnalyse(
                self.uuid,
                self.crd_data,
                self.full_log_path,
                "bulk_creation_deletion_measurement",
            ))

        full_results.add_key("interface", self.interface)
        full_results.add_key("bulk_size", bulk_size)
        full_results.add_key("pvc_size", self.pvc_size)
        full_results.add_key("bulk_pvc_creation_time", total_time)
        full_results.add_key("bulk_pvc_deletion_time", total_deletion_time)

        # Write the test results into the ES server
        full_results.es_write()

Example #3

    def test_bulk_pvc_creation_deletion_measurement_performance(
            self, teardown_factory, bulk_size):
        """
        Measure the creation and deletion times of bulk_size PVCs

        Args:
            teardown_factory: A fixture that registers resources created during the test
                               for removal in the teardown phase.
            bulk_size: Size of the bulk to be tested
        Returns:

        """
        bulk_creation_time_limit = bulk_size / 2
        log.info(f"Start creating new {bulk_size} PVCs")

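        # Unlike the previous variant, this one creates the PVCs directly in the
        # Rook cluster namespace rather than in a test-specific namespace.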
        pvc_objs, yaml_creation_dir = helpers.create_multiple_pvcs(
            sc_name=self.sc_obj.name,
            namespace=defaults.ROOK_CLUSTER_NAMESPACE,
            number_of_pvc=bulk_size,
            size=self.pvc_size,
            burst=True,
        )
        logging.info(f"PVC creation dir is {yaml_creation_dir}")

        for pvc_obj in pvc_objs:
            pvc_obj.reload()
            teardown_factory(pvc_obj)
        with ThreadPoolExecutor(max_workers=5) as executor:
            for pvc_obj in pvc_objs:
                executor.submit(helpers.wait_for_resource_state, pvc_obj,
                                constants.STATUS_BOUND)
                executor.submit(pvc_obj.reload)

        start_time = helpers.get_provision_time(self.interface,
                                                pvc_objs,
                                                status="start")
        end_time = helpers.get_provision_time(self.interface,
                                              pvc_objs,
                                              status="end")
        total_time = (end_time - start_time).total_seconds()
        log.info(f"{bulk_size} Bulk PVCs creation time is {total_time} seconds.")

        if total_time > bulk_creation_time_limit:
            raise ex.PerformanceException(
                f"{bulk_size} Bulk PVCs creation time is {total_time} seconds, "
                f"greater than the limit of {bulk_creation_time_limit} seconds")

        pv_names_list = []
        for pvc_obj in pvc_objs:
            pv_names_list.append(pvc_obj.backed_pv)

        logging.info(f"Starting to delete bulk of {bulk_size} PVCs")
        helpers.delete_bulk_pvcs(yaml_creation_dir, pv_names_list)
        log.info(f"Deletion of bulk of {bulk_size} PVCs successfully completed")

        log_deletion_times = helpers.measure_pv_deletion_time_bulk(
            self.interface, pv_names_list, return_log_times=True)

        all_start_times = [a_tuple[0] for a_tuple in log_deletion_times.values()]
        bulk_start_time = min(all_start_times)  # the earliest start time
        start_deletion_time = datetime.datetime.strptime(
            bulk_start_time, helpers.DATE_TIME_FORMAT)

        all_end_times = [a_tuple[1] for a_tuple in log_deletion_times.values()]
        bulk_deletion_time = max(all_end_times)  # the latest end time
        end_deletion_time = datetime.datetime.strptime(
            bulk_deletion_time, helpers.DATE_TIME_FORMAT)

        total_deletion_time = (end_deletion_time -
                               start_deletion_time).total_seconds()
        log.info(
            f"{bulk_size} Bulk PVCs deletion time is {total_deletion_time} seconds."
        )
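
All three variants receive bulk_size (and, in the first, interface_type) through pytest parametrization, which is what produces the four subtests mentioned in the first variant. Below is a minimal sketch of such a decorator, assuming ocs-ci's constants module; the bulk sizes shown (100 and 200) are illustrative placeholders, not necessarily the values used upstream.

import pytest

from ocs_ci.ocs import constants

# Sketch only: two interfaces x two bulk sizes = four subtests. The decorator
# would be applied to the first variant's method, which is why `self` appears.
@pytest.mark.parametrize(
    argnames=["interface_type", "bulk_size"],
    argvalues=[
        pytest.param(constants.CEPHBLOCKPOOL, 100),
        pytest.param(constants.CEPHBLOCKPOOL, 200),
        pytest.param(constants.CEPHFILESYSTEM, 100),
        pytest.param(constants.CEPHFILESYSTEM, 200),
    ],
)
def test_bulk_pvc_creation_deletion_measurement_performance(
    self, storageclass_factory, interface_type, bulk_size
):
    ...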