def test_multiple_pvc_creation_after_deletion_performance(
    self, teardown_factory
):
    """
    Measuring PVC creation time of 75% of initial PVCs (120) in the same
    rate after deleting 75% of the initial PVCs.

    Args:
        teardown_factory: A fixture used when we want a new resource that
            was created during the tests to be removed in the teardown
            phase.

    Raises:
        ex.PerformanceException: If re-creating 75% of the PVCs takes
            longer than 45 seconds.

    """
    initial_number_of_pvcs = 120
    # 75% of the initial batch will be deleted and then re-created
    number_of_pvcs = math.ceil(initial_number_of_pvcs * 0.75)

    log.info('Start creating new 120 PVCs')
    pvc_objs = helpers.create_multiple_pvcs(
        sc_name=self.sc_obj.name,
        namespace=defaults.ROOK_CLUSTER_NAMESPACE,
        number_of_pvc=initial_number_of_pvcs,
        size=self.pvc_size,
    )
    for pvc_obj in pvc_objs:
        teardown_factory(pvc_obj)

    # Wait for all initial PVCs to reach Bound state concurrently
    with ThreadPoolExecutor() as executor:
        for pvc_obj in pvc_objs:
            executor.submit(
                helpers.wait_for_resource_state, pvc_obj,
                constants.STATUS_BOUND
            )
            executor.submit(pvc_obj.reload)

    log.info('Deleting 75% of the PVCs - 90 PVCs')
    assert pvc.delete_pvcs(pvc_objs[:number_of_pvcs], True), (
        "Deletion of 75% of PVCs failed"
    )
    log.info('Re-creating the 90 PVCs')
    pvc_objs = helpers.create_multiple_pvcs(
        sc_name=self.sc_obj.name,
        namespace=defaults.ROOK_CLUSTER_NAMESPACE,
        number_of_pvc=number_of_pvcs,
        size=self.pvc_size,
    )
    # BUG FIX: the re-created PVCs were never registered with
    # teardown_factory, leaking them after the test run. Register them
    # the same way as the initial batch.
    for pvc_obj in pvc_objs:
        teardown_factory(pvc_obj)

    # Measure creation time from the first to the last re-created PVC
    start_time = helpers.get_start_creation_time(
        self.interface, pvc_objs[0].name
    )
    end_time = helpers.get_end_creation_time(
        self.interface, pvc_objs[number_of_pvcs - 1].name,
    )
    total = end_time - start_time
    total_time = total.total_seconds()
    if total_time > 45:
        raise ex.PerformanceException(
            f"{number_of_pvcs} PVCs creation (after initial deletion of "
            f"75%) time is {total_time} and greater than 45 seconds"
        )
    # Use the module logger consistently (was `logging.info`, which
    # bypasses this module's logger configuration)
    log.info(
        f"{number_of_pvcs} PVCs creation time took less than a 45 seconds"
    )
def create_pvcs(request):
    """
    Create multiple PVCs for the requesting test class and register a
    finalizer that deletes them (and validates backing-PV deletion) at
    teardown.
    """
    class_instance = request.node.cls

    def finalizer():
        """
        Delete multiple PVCs and validate that each backing PV is removed.
        """
        if hasattr(class_instance, 'pvc_objs'):
            # BUG FIX: the original kept only the LAST backed PV name and
            # validated a single PV after the wait loop. Collect every
            # backed PV name before deletion and validate each of them.
            backed_pv_names = []
            for pvc_obj in class_instance.pvc_objs:
                pvc_obj.reload()
                backed_pv_names.append(pvc_obj.backed_pv)
                pvc_obj.delete()
            for pvc_obj, backed_pv_name in zip(
                class_instance.pvc_objs, backed_pv_names
            ):
                pvc_obj.ocp.wait_for_delete(pvc_obj.name)
                helpers.validate_pv_delete(backed_pv_name)
    request.addfinalizer(finalizer)

    class_instance.pvc_objs = helpers.create_multiple_pvcs(
        sc_name=class_instance.sc_obj.name,
        number_of_pvc=class_instance.num_of_pvcs,
        size=class_instance.pvc_size,
        namespace=class_instance.namespace)

    # Block until every PVC is Bound before the test body runs
    for pvc_obj in class_instance.pvc_objs:
        helpers.wait_for_resource_state(pvc_obj, constants.STATUS_BOUND)
        pvc_obj.reload()
def test_multiple_pvc_concurrent_creation_deletion(self):
    """
    To exercise resource creation and deletion

    Deletes the initial PVCs via an async `oc delete` shell loop while
    concurrently creating a new batch, then verifies both operations.
    """
    # Start deleting 100 PVCs
    command = (f'for i in `seq 1 {self.number_of_pvc}`;do oc delete pvc '
               f'{self.pvc_base_name}$i -n {self.namespace};done')
    proc = run_async(command)
    assert proc, (
        f'Failed to execute command for deleting {self.number_of_pvc} PVCs'
    )

    # Create 100 PVCs
    pvc_objs = create_multiple_pvcs(sc_name=self.sc_obj.name,
                                    namespace=self.namespace,
                                    number_of_pvc=self.number_of_pvc)
    log.info(f'Created {self.number_of_pvc} new PVCs.')
    self.pvc_objs_new = pvc_objs[:]

    # Verify PVCs are Bound
    for pvc in self.pvc_objs_new:
        pvc.reload()
        assert pvc.status == constants.STATUS_BOUND, (
            f'PVC {pvc.name} is not Bound')
    log.info('Verified: Newly created PVCs are in Bound state.')

    # Verify command to delete PVCs
    ret, out, err = proc.async_communicate()
    log.info(
        f'Return values of command: {command}.\nretcode:{ret}\nstdout:'
        f'{out}\nstderr:{err}')
    assert not ret, 'Deletion of PVCs failed'

    # Verify PVCs are deleted
    for pvc in self.pvc_objs_initial:
        try:
            pvc.get()
        except exceptions.CommandFailed as exp:
            assert "not found" in str(exp), (
                f'Failed to fetch details of PVC {pvc.name}')
            log.info(f'Expected: PVC {pvc.name} does not exists ')
        else:
            # BUG FIX: the original did `return False` here, which pytest
            # silently ignores — the test would PASS even though the PVC
            # still exists. Fail explicitly instead.
            assert False, f'PVC {pvc.name} still exists'
    log.info(f'Successfully deleted initial {self.number_of_pvc} PVCs')

    # Verify PVs using ceph toolbox. PVs should be deleted because
    # reclaimPolicy is Delete
    ceph_cmd = f'rbd ls -p {self.cbp_obj.name}'
    ct_pod = get_ceph_tools_pod()
    final_pv_list = ct_pod.exec_ceph_cmd(ceph_cmd=ceph_cmd, format='json')
    assert not any(pv in final_pv_list for pv in self.initial_pvs), (
        'PVs associated with deleted PVCs still exists')
    log.info('Verified: PVs associated with deleted PVCs are also deleted')
def test_multiple_pvc_creation_measurement_performance(
    self, teardown_factory
):
    """
    Measuring PVC creation time of 120 PVCs in 180 seconds

    Args:
        teardown_factory: A fixture used when we want a new resource that
            was created during the tests to be removed in the teardown
            phase.

    Raises:
        ex.PerformanceException: If creating the PVCs takes longer than
            180 seconds.

    """
    number_of_pvcs = 120
    log.info('Start creating new 120 PVCs')

    # Create the whole batch at once (burst=True)
    pvc_objs = helpers.create_multiple_pvcs(
        sc_name=self.sc_obj.name,
        namespace=defaults.ROOK_CLUSTER_NAMESPACE,
        number_of_pvc=number_of_pvcs,
        size=self.pvc_size,
        burst=True
    )

    for pvc_obj in pvc_objs:
        pvc_obj.reload()
        teardown_factory(pvc_obj)

    # Wait for all PVCs to reach Bound state concurrently
    with ThreadPoolExecutor(max_workers=5) as executor:
        for pvc_obj in pvc_objs:
            executor.submit(
                helpers.wait_for_resource_state, pvc_obj,
                constants.STATUS_BOUND
            )
            executor.submit(pvc_obj.reload)

    # Provisioning window = earliest start to latest end across the batch
    start_time = helpers.get_provision_time(
        self.interface, pvc_objs, status='start'
    )
    end_time = helpers.get_provision_time(
        self.interface, pvc_objs, status='end'
    )
    total = end_time - start_time
    total_time = total.total_seconds()
    if total_time > 180:
        raise ex.PerformanceException(
            f"{number_of_pvcs} PVCs creation time is {total_time} and "
            f"greater than 180 seconds"
        )
    # Use the module logger consistently (was `logging.info`, which
    # bypasses this module's logger configuration)
    log.info(
        f"{number_of_pvcs} PVCs creation time took {total_time} seconds"
    )
def test_multiple_pvc_creation_measurement_performance(
    self, teardown_factory
):
    """
    Measuring PVC creation time of 120 PVCs in 180 seconds

    Args:
        teardown_factory: A fixture used when we want a new resource that
            was created during the tests to be removed in the teardown
            phase.

    Raises:
        ex.PerformanceException: If creating the PVCs takes longer than
            180 seconds.

    """
    number_of_pvcs = 120
    log.info('Start creating new 120 PVCs')

    pvc_objs = helpers.create_multiple_pvcs(
        sc_name=self.sc_obj.name,
        namespace=defaults.ROOK_CLUSTER_NAMESPACE,
        number_of_pvc=number_of_pvcs,
        size=self.pvc_size,
    )

    for pvc_obj in pvc_objs:
        pvc_obj.reload()
        teardown_factory(pvc_obj)

    # Wait for all PVCs to reach Bound state concurrently
    with ThreadPoolExecutor(max_workers=5) as executor:
        for pvc_obj in pvc_objs:
            executor.submit(
                helpers.wait_for_resource_state, pvc_obj,
                constants.STATUS_BOUND
            )
            executor.submit(pvc_obj.reload)

    # Measure creation time from the first to the last PVC in the batch
    start_time = helpers.get_start_creation_time(
        self.interface, pvc_objs[0].name
    )
    end_time = helpers.get_end_creation_time(
        self.interface, pvc_objs[number_of_pvcs - 1].name,
    )
    total = end_time - start_time
    total_time = total.total_seconds()
    if total_time > 180:
        raise ex.PerformanceException(
            f"{number_of_pvcs} PVCs creation time is {total_time} and "
            f"greater than 180 seconds"
        )
    # Use the module logger consistently (was `logging.info`, which
    # bypasses this module's logger configuration)
    log.info(
        f"{number_of_pvcs} PVCs creation time took {total_time} seconds"
    )
def test_multiple_pvc_concurrent_creation_deletion(self):
    """
    To exercise resource creation and deletion

    Deletes the existing PVCs on a worker thread while concurrently
    creating a new batch, then verifies both operations succeeded.
    """
    executor = ThreadPoolExecutor(max_workers=1)

    # Start deleting 100 PVCs
    log.info('Start deleting PVCs.')
    pvc_delete = executor.submit(delete_pvcs, self.pvc_objs)

    # Create 100 PVCs
    log.info('Start creating new PVCs')
    new_pvc_objs = create_multiple_pvcs(sc_name=self.sc_obj.name,
                                        namespace=self.namespace,
                                        number_of_pvc=self.num_of_pvcs)
    for pvc_obj in new_pvc_objs:
        wait_for_resource_state(pvc_obj, constants.STATUS_BOUND)
        pvc_obj.reload()
    log.info(f'Newly created {self.num_of_pvcs} PVCs are in Bound state.')
    self.pvc_objs_new.extend(new_pvc_objs)

    # Verify the deletion task completed successfully
    res = pvc_delete.result()
    assert res, 'Deletion of PVCs failed'
    log.info('PVC deletion was successful.')
    # Release the worker thread now that the future is resolved
    executor.shutdown(wait=True)

    # BUG FIX: the original called self.pvc_objs.clear() and then looped
    # over self.pvc_objs, so the verification loop iterated an empty list
    # and verified nothing. Keep a reference to the deleted PVC objects,
    # then clear pvc_objs to avoid error in 'create_pvcs' fixture.
    deleted_pvc_objs = list(self.pvc_objs)
    self.pvc_objs.clear()

    # Verify PVCs are deleted
    for pvc in deleted_pvc_objs:
        try:
            pvc.get()
        except exceptions.CommandFailed as exp:
            assert "not found" in str(exp), (
                f'Failed to fetch details of PVC {pvc.name}')
            log.info(f'Expected: PVC {pvc.name} does not exists')
        else:
            # BUG FIX: was `return False`, which pytest silently ignores;
            # a still-existing PVC must fail the test.
            assert False, f'PVC {pvc.name} still exists'
    log.info(f'Successfully deleted initial {self.num_of_pvcs} PVCs')
def create_pvcs(request):
    """
    Create multiple PVCs for the requesting test class and arrange for
    their deletion during teardown.
    """
    class_instance = request.node.cls

    def finalizer():
        """Delete every PVC created by this fixture and wait for removal."""
        pvc_objs = getattr(class_instance, 'pvc_objs', [])
        # Issue all deletes first, then wait, so deletions run in parallel
        for pvc_obj in pvc_objs:
            pvc_obj.delete()
        for pvc_obj in pvc_objs:
            pvc_obj.ocp.wait_for_delete(pvc_obj.name)

    request.addfinalizer(finalizer)

    class_instance.pvc_objs = helpers.create_multiple_pvcs(
        sc_name=class_instance.sc_obj.name,
        number_of_pvc=class_instance.num_of_pvcs,
        size=class_instance.pvc_size,
        namespace=class_instance.namespace)
def setup(self):
    """
    Create a new project, then create the initial batch of PVCs and
    verify they all reach the Bound state.
    """
    # Create a uniquely-named project for this test run
    self.namespace = create_unique_resource_name('test', 'namespace')
    self.project_obj = ocp.OCP(kind='Project', namespace=self.namespace)
    assert self.project_obj.new_project(self.namespace), (
        f'Failed to create new project {self.namespace}')

    # Create the initial PVC batch
    created_pvcs = create_multiple_pvcs(
        sc_name=self.sc_obj.name,
        namespace=self.namespace,
        number_of_pvc=self.number_of_pvc)
    log.info(f'Created initial {self.number_of_pvc} PVCs')
    self.pvc_objs_initial = list(created_pvcs)

    # Verify PVCs are Bound and record the backing PV names
    for pvc_obj in self.pvc_objs_initial:
        pvc_obj.reload()
        assert pvc_obj.status == constants.STATUS_BOUND, (
            f'PVC {pvc_obj.name} is not Bound')
        self.initial_pvs.append(pvc_obj.backed_pv)
    log.info(f'Initial {self.number_of_pvc} PVCs are in Bound state')
def test_multiple_pvc_creation_deletion_scale(self, namespace, access_mode, interface):
    """
    Measuring PVC creation time while scaling PVC
    Measure PVC deletion time after creation test

    Args:
        namespace: Fixture that provides the test namespace.
        access_mode (str): PVC access mode (e.g. RWO / RWX).
        interface (str): Storage interface (CephBlockPool or CephFS).
    """
    number_of_pvc = 1500
    log.info(
        f"Start creating {access_mode}-{interface} {number_of_pvc} PVC")

    if interface == constants.CEPHBLOCKPOOL:
        self.sc_obj = constants.DEFAULT_STORAGECLASS_RBD
    elif interface == constants.CEPHFS_INTERFACE:
        self.sc_obj = constants.DEFAULT_STORAGECLASS_CEPHFS

    # Create PVC
    pvc_objs = helpers.create_multiple_pvcs(
        sc_name=self.sc_obj, namespace=self.namespace,
        number_of_pvc=number_of_pvc,
        size=f"{random.randrange(5, 105, 5)}Gi",
        access_mode=access_mode)

    # Check for PVC status using threads
    threads = list()
    for obj in pvc_objs:
        process = threading.Thread(target=helpers.wait_for_resource_state,
                                   args=(obj, constants.STATUS_BOUND, ))
        process.start()
        threads.append(process)
    for process in threads:
        process.join()

    # Refresh PVC objects concurrently so name/backed_pv are up to date
    threads = list()
    for pvc_obj in pvc_objs:
        process = threading.Thread(target=pvc_obj.reload)
        process.start()
        threads.append(process)
    for process in threads:
        process.join()

    # BUG FIX: the original wrapped `list.append(...)` in
    # threading.Thread(target=...), which evaluated append() eagerly and
    # started threads whose target was None (append returns None).
    # Collect the names directly — a comprehension is correct and simpler.
    pvc_name_list = [pvc_obj.name for pvc_obj in pvc_objs]
    pv_name_list = [pvc_obj.backed_pv for pvc_obj in pvc_objs]

    # Get PVC creation time
    pvc_create_time = helpers.measure_pvc_creation_time_bulk(
        interface=interface, pvc_name_list=pvc_name_list)

    # TODO: Update below code with google API, to record value in spreadsheet
    # TODO: For now observing Google API limit to write more than 100 writes
    log_path = f"{ocsci_log_path()}/{self.sc_obj}-{access_mode}"
    with open(f"{log_path}-creation-time.csv", "w") as fd:
        csv_obj = csv.writer(fd)
        for k, v in pvc_create_time.items():
            csv_obj.writerow([k, v])
    # Use the module logger consistently (was `logging.info`)
    log.info(
        f"Create data present in {log_path}-creation-time.csv file")

    # Delete PVC
    for obj in pvc_objs:
        obj.delete()
        obj.ocp.wait_for_delete(obj.name)

    # Get PVC deletion time
    pvc_deletion_time = helpers.measure_pv_deletion_time_bulk(
        interface=interface, pv_name_list=pv_name_list)

    # Update result to csv file.
    # TODO: Update below code with google API, to record value in spreadsheet
    # TODO: For now observing Google API limit to write more than 100 writes
    with open(f"{log_path}-deletion-time.csv", "w") as fd:
        csv_obj = csv.writer(fd)
        for k, v in pvc_deletion_time.items():
            csv_obj.writerow([k, v])
    log.info(
        f"Delete data present in {log_path}-deletion-time.csv file")
def test_all_4_type_pvc_creation_deletion_scale(self, namespace):
    """
    Measuring PVC creation time while scaling PVC of all 4 types,
    Total 1500 PVCs will be created, i.e. 375 each pvc type
    (CephFS RWO/RWX and RBD RWO/RWX).
    Measure PVC deletion time in scale env

    Args:
        namespace: Fixture that provides the test namespace.
    """
    number_of_pvc = 375
    log.info(f"Start creating {number_of_pvc} PVC of all 4 types")

    cephfs_sc_obj = constants.DEFAULT_STORAGECLASS_CEPHFS
    rbd_sc_obj = constants.DEFAULT_STORAGECLASS_RBD

    # Create all 4 types of PVC
    fs_pvc_obj, rbd_pvc_obj = ([] for i in range(2))
    for mode in [constants.ACCESS_MODE_RWO, constants.ACCESS_MODE_RWX]:
        fs_pvc_obj.extend(
            helpers.create_multiple_pvcs(
                sc_name=cephfs_sc_obj, namespace=self.namespace,
                number_of_pvc=number_of_pvc,
                size=f"{random.randrange(5, 105, 5)}Gi",
                access_mode=mode))
        rbd_pvc_obj.extend(
            helpers.create_multiple_pvcs(
                sc_name=rbd_sc_obj, namespace=self.namespace,
                number_of_pvc=number_of_pvc,
                size=f"{random.randrange(5, 105, 5)}Gi",
                access_mode=mode))

    # Check for PVC status using threads
    threads = list()
    for obj in fs_pvc_obj + rbd_pvc_obj:
        process = threading.Thread(target=helpers.wait_for_resource_state,
                                   args=(obj, constants.STATUS_BOUND, ))
        process.start()
        threads.append(process)
    for process in threads:
        process.join()

    # Refresh all PVC objects concurrently so name/backed_pv are current
    threads = list()
    for obj in fs_pvc_obj + rbd_pvc_obj:
        process = threading.Thread(target=obj.reload)
        process.start()
        threads.append(process)
    for process in threads:
        process.join()

    # BUG FIX: the original wrapped `list.append(...)` in
    # threading.Thread(target=...), which evaluated append() eagerly and
    # started threads whose target was None (append returns None).
    # Collect the names directly instead.
    fs_pvc_name = [obj.name for obj in fs_pvc_obj]
    rbd_pvc_name = [obj.name for obj in rbd_pvc_obj]
    fs_pv_name = [obj.backed_pv for obj in fs_pvc_obj]
    rbd_pv_name = [obj.backed_pv for obj in rbd_pvc_obj]

    # Get PVC creation time
    fs_pvc_create_time = helpers.measure_pvc_creation_time_bulk(
        interface=constants.CEPHFS_INTERFACE, pvc_name_list=fs_pvc_name)
    rbd_pvc_create_time = helpers.measure_pvc_creation_time_bulk(
        interface=constants.CEPHBLOCKPOOL, pvc_name_list=rbd_pvc_name)
    fs_pvc_create_time.update(rbd_pvc_create_time)

    # TODO: Update below code with google API, to record value in spreadsheet
    # TODO: For now observing Google API limit to write more than 100 writes
    log_path = f"{ocsci_log_path()}/All-type-PVC"
    with open(f"{log_path}-creation-time.csv", "w") as fd:
        csv_obj = csv.writer(fd)
        for k, v in fs_pvc_create_time.items():
            csv_obj.writerow([k, v])
    # Use the module logger consistently (was `logging.info`)
    log.info(
        f"Create data present in {log_path}-creation-time.csv file")

    # Delete PVC
    pvc_objs = fs_pvc_obj + rbd_pvc_obj
    for obj in pvc_objs:
        obj.delete()
        obj.ocp.wait_for_delete(obj.name)

    # Get PVC deletion time
    fs_pvc_deletion_time = helpers.measure_pv_deletion_time_bulk(
        interface=constants.CEPHFS_INTERFACE, pv_name_list=fs_pv_name)
    rbd_pvc_deletion_time = helpers.measure_pv_deletion_time_bulk(
        interface=constants.CEPHBLOCKPOOL, pv_name_list=rbd_pv_name)
    fs_pvc_deletion_time.update(rbd_pvc_deletion_time)

    # TODO: Update below code with google API, to record value in spreadsheet
    # TODO: For now observing Google API limit to write more than 100 writes
    with open(f"{log_path}-deletion-time.csv", "w") as fd:
        csv_obj = csv.writer(fd)
        for k, v in fs_pvc_deletion_time.items():
            csv_obj.writerow([k, v])
    log.info(
        f"Delete data present in {log_path}-deletion-time.csv file")
def test_multiple_pvc_deletion_measurement_performance(
        self, teardown_factory):
    """
    Measuring PVC deletion time of 120 PVCs in 180 seconds

    Args:
        teardown_factory: A fixture used when we want a new resource that
            was created during the tests to be removed in the teardown
            phase.

    """
    number_of_pvcs = 120
    pvc_size = '1Gi'
    log.info('Start creating new 120 PVCs')

    pvc_objs = helpers.create_multiple_pvcs(
        sc_name=self.sc_obj.name,
        namespace=defaults.ROOK_CLUSTER_NAMESPACE,
        number_of_pvc=number_of_pvcs,
        size=pvc_size,
    )
    for pvc_obj in pvc_objs:
        pvc_obj.reload()
        teardown_factory(pvc_obj)

    # Wait for all PVCs to reach Bound state concurrently
    with ThreadPoolExecutor(max_workers=5) as executor:
        for pvc_obj in pvc_objs:
            executor.submit(helpers.wait_for_resource_state, pvc_obj,
                            constants.STATUS_BOUND)
            executor.submit(pvc_obj.reload)

    # Refresh PVC objects so backed_pv reflects the bound state
    threads = list()
    for pvc_obj in pvc_objs:
        process = threading.Thread(target=pvc_obj.reload)
        process.start()
        threads.append(process)
    for process in threads:
        process.join()

    # BUG FIX: the original wrapped `list.append(...)` in
    # threading.Thread(target=...), which evaluated append() eagerly and
    # started threads whose target was None. Collect the PV names
    # directly; the unused pvc_name_list is dropped (only PV names are
    # needed to measure deletion time from the logs).
    pv_name_list = [pvc_obj.backed_pv for pvc_obj in pvc_objs]

    log.info("Preparing to delete 120 PVC")

    # Delete PVC
    for obj in pvc_objs:
        obj.delete()
    for obj in pvc_objs:
        obj.ocp.wait_for_delete(obj.name)

    # Get PVC deletion time
    pvc_deletion_time = helpers.measure_pv_deletion_time_bulk(
        interface=self.interface, pv_name_list=pv_name_list)
    # Use the module logger consistently (was `logging.info`)
    log.info(
        f"{number_of_pvcs} PVCs deletion time took {pvc_deletion_time}")