def __init__(self, duts, tester, target, suitename):
    """
    Initialize a test-suite instance.

    :param duts: list of DUT crb objects; the first one becomes self.dut
    :param tester: tester crb object
    :param target: build target string, stored on the suite result
    :param suitename: name of this test suite
    """
    self.suite_name = suitename
    self.dut = duts[0]
    self.duts = duts
    self.tester = tester
    self.target = target

    # Per-suite logger named after the concrete subclass.
    class_name = self.__class__.__name__
    self.logger = getLogger(class_name)
    self.logger.config_suite(class_name)

    self._requested_tests = None
    self.nics = []

    # Read the configured nic_type out of framework/execution.cfg; the
    # value may be "name:driver", only the name part is kept.
    execution_path = os.path.dirname(os.path.dirname(__file__))
    execution_file = execution_path + '/framework/execution.cfg'
    # Use a context manager so the handle is closed deterministically
    # (the original leaked the open file object).
    with open(execution_file, 'r') as execution:
        status = re.findall(r"\n+parameters=nic_type=(.*)", execution.read())
    status_nic = status[0].split(":")
    self.nic = status_nic[0]
    self.kdriver = self._get_nic_driver(self.nic)

    # Result object used to record this suite's outcome.
    self._suite_result = Result()
    self._suite_result.dut = self.dut.crb['IP']
    self._suite_result.target = target
    self._suite_result.nic = self.nic
    self._suite_result.test_suite = self.suite_name

    # Runtime flags derived from the global settings ("yes"/"no" strings).
    self._enable_perf = load_global_setting(PERF_SETTING) == "yes"
    self._enable_func = load_global_setting(FUNC_SETTING) == "yes"
    self._enable_debug = load_global_setting(DEBUG_SETTING) == "yes"
    self._debug_case = load_global_setting(DEBUG_CASE_SETTING) == "yes"

    self.drivername = load_global_setting(HOST_DRIVER_SETTING)
def run_all(config_file, pkgName, git, patch, skip_setup, read_cache, project,
            suite_dir, test_cases, base_dir, output_dir, verbose, virttype,
            debug, debugcase, re_run, commands):
    """
    Main process of DTS, it will run all test suites in the config file.
    """
    global requested_tests
    global result
    global excel_report
    global json_report
    global stats_report
    global log_handler
    global check_case_inst
    # save global variable
    serializer = Serializer()
    # load check/support case lists
    check_case_inst = CheckCase()
    # prepare the output folder
    if output_dir == '':
        output_dir = settings.FOLDERS['Output']
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    # add external library
    exec_file = os.path.realpath(__file__)
    extra_libs_path = exec_file.replace('framework/dts.py', '') + 'extra_libs'
    sys.path.insert(1, extra_libs_path)
    # add python module search path
    sys.path.append(suite_dir)
    # enable debug mode
    if debug is True:
        settings.save_global_setting(settings.DEBUG_SETTING, 'yes')
    if debugcase is True:
        settings.save_global_setting(settings.DEBUG_CASE_SETTING, 'yes')
    # init log_handler handler
    if verbose is True:
        logger.set_verbose()
    # negative re-run counts are normalized to "no re-run"
    if re_run < 0:
        re_run = 0
    logger.log_dir = output_dir
    log_handler = getLogger('dts')
    log_handler.config_execution('dts')
    # run designated test case
    requested_tests = test_cases
    # Read config file; an alternate config folder may be set globally
    dts_cfg_folder = settings.load_global_setting(settings.DTS_CFG_FOLDER)
    if dts_cfg_folder != '':
        config_file = dts_cfg_folder + os.sep + config_file
    config = ConfigParser.SafeConfigParser()
    load_cfg = config.read(config_file)
    if len(load_cfg) == 0:
        raise ConfigParseException(config_file)
    # parse commands
    dts_commands = dts_parse_commands(commands)
    os.environ["TERM"] = "dumb"
    # change rst output folder
    rst.path2Result = output_dir
    # report objects
    excel_report = ExcelReporter(output_dir + '/test_results.xls')
    json_report = JSONReporter(output_dir + '/test_results.json')
    stats_report = StatsReporter(output_dir + '/statistics.txt')
    result = Result()
    # NOTE(review): crbInsts is created once and appended to per section,
    # so entries accumulate across Execution sections — confirm intended.
    crbInsts = []
    crbs_conf = CrbsConf()
    crbs = crbs_conf.load_crbs_config()
    # for all Execution sections
    for section in config.sections():
        dts_parse_param(config, section)
        # verify if the delimiter is good if the lists are vertical
        duts, targets, test_suites = dts_parse_config(config, section)
        for dut in duts:
            log_handler.info("\nDUT " + dut)
        # look up in crbs - to find the matching IP
        for dut in duts:
            for crb in crbs:
                if crb['section'] == dut:
                    crbInsts.append(crb)
                    break
        # only run on the dut in known crbs
        if len(crbInsts) == 0:
            log_handler.error(" SKIP UNKNOWN CRB")
            continue
        result.dut = duts[0]
        # init global lock
        create_parallel_locks(len(duts))
        # init dut, tester crb
        duts, tester = dts_crbs_init(crbInsts, skip_setup, read_cache,
                                     project, base_dir, serializer, virttype)
        tester.set_re_run(re_run)
        # register exit action
        atexit.register(quit_execution, duts, tester)
        check_case_inst.check_dut(duts[0])
        # Run DUT prerequisites; on failure tear the crbs down and move on
        if dts_run_prerequisties(duts, tester, pkgName, patch, dts_commands,
                                 serializer) is False:
            dts_crbs_exit(duts, tester)
            continue
        dts_run_target(duts, tester, targets, test_suites)
        dts_crbs_exit(duts, tester)
    # persist all collected results after every section has run
    save_all_results()
def __init__(self, duts, tester, target, suitename):
    """
    Initialize a test-suite instance.

    :param duts: list of DUT crb objects; the first one becomes self.dut
    :param tester: tester crb object
    :param target: build target string, stored on the suite result
    :param suitename: name of this test suite
    """
    self.suite_name = suitename
    self.dut = duts[0]
    self.duts = duts
    self.tester = tester
    self.target = target

    # local variable
    self._requested_tests = None

    # check session and reconnect if possible
    for dutobj in self.duts:
        self._check_and_reconnect(crb=dutobj)
    self._check_and_reconnect(crb=self.tester)

    # convert netdevice port types to codenames; first port decides the
    # suite-level NIC name (empty string when the DUT exposes no ports)
    self.nics = [get_nic_name(port['type']) for port in self.dut.ports_info]
    self.nic = self.nics[0] if self.nics else ''
    self.kdriver = self._get_nic_driver(self.nic)

    # result object for saving the suite result
    self._suite_result = Result()
    self._suite_result.dut = self.dut.crb['IP']
    self._suite_result.target = target
    self._suite_result.nic = self.nic
    self._suite_result.test_suite = self.suite_name

    # load running environment flags ("yes"/"no" strings in global settings)
    self._enable_perf = load_global_setting(PERF_SETTING) == "yes"
    self._enable_func = load_global_setting(FUNC_SETTING) == "yes"
    self._enable_debug = load_global_setting(DEBUG_SETTING) == "yes"
    self._debug_case = load_global_setting(DEBUG_CASE_SETTING) == "yes"

    self.drivername = load_global_setting(HOST_DRIVER_SETTING)

    # create rst format report for this suite
    self._rst_obj = RstReport('rst_report', target, self.nic,
                              self.suite_name, self._enable_perf)

    # load suite configuration
    self._suite_conf = SuiteConf(self.suite_name)
    self._suite_cfg = self._suite_conf.suite_cfg
class TestCase(object):
    """
    Base class for DTS test suites: owns the DUT/tester sessions, NIC
    discovery, suite result/report objects, and the case execution loop.
    Concrete suites subclass this and implement test_* methods plus the
    set_up/tear_down hooks.
    """

    def __init__(self, duts, tester, target, suitename):
        self.suite_name = suitename
        self.dut = duts[0]
        self.duts = duts
        self.tester = tester
        self.target = target
        # local variable
        self._requested_tests = None
        # check session and reconnect if possible
        for dutobj in self.duts:
            self._check_and_reconnect(crb=dutobj)
        self._check_and_reconnect(crb=self.tester)
        # covert netdevice to codename
        self.nics = []
        for portid in range(len(self.dut.ports_info)):
            nic_type = self.dut.ports_info[portid]['type']
            self.nics.append(get_nic_name(nic_type))
        if len(self.nics):
            self.nic = self.nics[0]
        else:
            self.nic = ''
        self.kdriver = self._get_nic_driver(self.nic)
        # result object for save suite result
        self._suite_result = Result()
        self._suite_result.dut = self.dut.crb['IP']
        self._suite_result.target = target
        self._suite_result.nic = self.nic
        self._suite_result.test_suite = self.suite_name
        # NOTE(review): this check can never fire — Result() was assigned
        # on the line above, so _suite_result is never None here.
        if self._suite_result is None:
            raise ValueError("Result object should not None")
        # load running enviornment
        if load_global_setting(PERF_SETTING) == "yes":
            self._enable_perf = True
        else:
            self._enable_perf = False
        if load_global_setting(FUNC_SETTING) == "yes":
            self._enable_func = True
        else:
            self._enable_func = False
        if load_global_setting(DEBUG_SETTING) == "yes":
            self._enable_debug = True
        else:
            self._enable_debug = False
        if load_global_setting(DEBUG_CASE_SETTING) == "yes":
            self._debug_case = True
        else:
            self._debug_case = False
        self.drivername = load_global_setting(HOST_DRIVER_SETTING)
        # create rst format report for this suite
        self._rst_obj = RstReport('rst_report', target, self.nic,
                                  self.suite_name, self._enable_perf)
        # load suite configuration
        self._suite_conf = SuiteConf(self.suite_name)
        self._suite_cfg = self._suite_conf.suite_cfg

    def init_log(self):
        # get log handler named after the concrete suite subclass
        class_name = self.__class__.__name__
        self.logger = getLogger(class_name)
        self.logger.config_suite(class_name)

    def _check_and_reconnect(self, crb=None):
        """
        Probe both sessions of a crb and reconnect any that is down.
        For a DUT the working directory is restored after reconnecting.
        """
        try:
            result = crb.session.check_available()
        except:
            # any probe failure is treated the same as "session down"
            result = False
        if result is False:
            crb.reconnect_session()
            if 'dut' in str(type(crb)):
                crb.send_expect("cd %s" % crb.base_dir, "#")
        try:
            result = crb.alt_session.check_available()
        except:
            result = False
        if result is False:
            crb.reconnect_session(alt_session=True)

    def set_up_all(self):
        # suite-level setup hook; subclasses override
        pass

    def set_up(self):
        # per-case setup hook; subclasses override
        pass

    def tear_down(self):
        # per-case teardown hook; subclasses override
        pass

    def tear_down_all(self):
        # suite-level teardown hook; subclasses override
        pass

    def verify(self, passed, description):
        """
        Raise VerifyFailure with *description* when *passed* is falsy.
        """
        if not passed:
            raise VerifyFailure(description)

    def _get_nic_driver(self, nic_name):
        # map a NIC codename to its kernel driver; "Unknown" if unmapped
        if nic_name in DRIVERS.keys():
            return DRIVERS[nic_name]
        return "Unknown"

    def set_check_inst(self, check=None, support=None):
        # attach skip-check and support-check helpers used per case
        self._check_inst = check
        self._support_inst = support

    def rst_report(self, *args, **kwargs):
        # forward free-form report text to the rst report object
        self._rst_obj.report(*args, **kwargs)

    def result_table_create(self, header):
        # start a fresh result table wired to the rst report and logger
        self._result_table = ResultTable(header)
        self._result_table.set_rst(self._rst_obj)
        self._result_table.set_logger(self.logger)

    def result_table_add(self, row):
        self._result_table.add_row(row)

    def result_table_print(self):
        self._result_table.table_print()

    def result_table_getrows(self):
        return self._result_table.results_table_rows

    def _get_functional_cases(self):
        """
        Get all functional test cases.
        """
        return self._get_test_cases(r'test_(?!perf_)')

    def _get_performance_cases(self):
        """
        Get all performance test cases.
        """
        return self._get_test_cases(r'test_perf_')

    def _has_it_been_requested(self, test_case, test_name_regex):
        """
        Check whether test case has been requested for validation.
        """
        name_matches = re.match(test_name_regex, test_case.__name__)
        if self._requested_tests is not None:
            return name_matches and test_case.__name__ in self._requested_tests
        return name_matches

    def set_requested_cases(self, case_list):
        """
        Pass down input cases list for check
        """
        self._requested_tests = case_list

    def _get_test_cases(self, test_name_regex):
        """
        Return case list which name matched regex.
        """
        for test_case_name in dir(self):
            test_case = getattr(self, test_case_name)
            if callable(test_case) and self._has_it_been_requested(
                    test_case, test_name_regex):
                yield test_case

    def execute_setup_all(self):
        """
        Execute suite setup_all function before cases.
        Returns True on success; on failure marks every enabled case
        blocked and returns False.
        """
        # clear all previous output
        for dutobj in self.duts:
            dutobj.get_session_output(timeout=0.1)
        self.tester.get_session_output(timeout=0.1)
        try:
            self.set_up_all()
            return True
        except Exception:
            self.logger.error('set_up_all failed:\n' + traceback.format_exc())
            # record all cases blocked
            if self._enable_func:
                for case_obj in self._get_functional_cases():
                    self._suite_result.test_case = case_obj.__name__
                    self._suite_result.test_case_blocked('set_up_all failed')
            if self._enable_perf:
                for case_obj in self._get_performance_cases():
                    self._suite_result.test_case = case_obj.__name__
                    self._suite_result.test_case_blocked('set_up_all failed')
            return False

    def _execute_test_case(self, case_obj):
        """
        Execute specified test case in specified suite. If any exception
        occured in validation process, save the result and tear down this
        case. Returns True when the case passed or was skipped, False on
        failure.
        """
        case_name = case_obj.__name__
        self._suite_result.test_case = case_obj.__name__
        self._rst_obj.write_title("Test Case: " + case_name)
        # load suite configuration file here for rerun command
        self._suite_conf = SuiteConf(self.suite_name)
        self._suite_cfg = self._suite_conf.suite_cfg
        self._case_cfg = self._suite_conf.load_case_config(case_name)
        del(self._suite_conf)
        case_result = True
        # skip-list check (case name without the "test_" prefix)
        if self._check_inst is not None:
            if self._check_inst.case_skip(case_name[len("test_"):]):
                self.logger.info('Test Case %s Result SKIPED:' % case_name)
                self._rst_obj.write_result("N/A")
                self._suite_result.test_case_skip(self._check_inst.comments)
                return case_result
        # support-list check
        if self._support_inst is not None:
            if not self._support_inst.case_support(case_name[len("test_"):]):
                self.logger.info('Test Case %s Result SKIPED:' % case_name)
                self._rst_obj.write_result("N/A")
                self._suite_result.test_case_skip(self._support_inst.comments)
                return case_result
        if self._enable_perf:
            self._rst_obj.write_annex_title("Annex: " + case_name)
        try:
            self.logger.info('Test Case %s Begin' % case_name)
            self.running_case = case_name
            # clean session
            for dutobj in self.duts:
                dutobj.get_session_output(timeout=0.1)
            self.tester.get_session_output(timeout=0.1)
            # run set_up function for each case
            self.set_up()
            # run test case
            case_obj()
            self._suite_result.test_case_passed()
            self._rst_obj.write_result("PASS")
            self.logger.info('Test Case %s Result PASSED:' % case_name)
        except VerifyFailure as v:
            case_result = False
            self._suite_result.test_case_failed(str(v))
            self._rst_obj.write_result("FAIL")
            self.logger.error('Test Case %s Result FAILED: ' % (case_name)
                              + str(v))
        except KeyboardInterrupt:
            # user abort: record as blocked and stop the whole run
            self._suite_result.test_case_blocked("Skipped")
            self.logger.error('Test Case %s SKIPED: ' % (case_name))
            self.tear_down()
            raise KeyboardInterrupt("Stop DTS")
        except TimeoutException as e:
            case_result = False
            self._rst_obj.write_result("FAIL")
            self._suite_result.test_case_failed(str(e))
            self.logger.error('Test Case %s Result FAILED: ' % (case_name)
                              + str(e))
            self.logger.error('%s' % (e.get_output()))
        except Exception:
            case_result = False
            trace = traceback.format_exc()
            self._suite_result.test_case_failed(trace)
            self.logger.error('Test Case %s Result ERROR: ' % (case_name)
                              + trace)
        finally:
            self.tear_down()
            # NOTE(review): returning inside finally swallows any exception
            # raised by tear_down() itself.
            return case_result

    def execute_test_cases(self):
        """
        Execute all test cases in one suite.
        Failed functional cases are retried up to tester.re_run_time times.
        """
        # prepare debugger rerun case environment
        if self._enable_debug or self._debug_case:
            debugger.AliveSuite = self
            debugger.AliveModule = __import__('TestSuite_' + self.suite_name)
        if load_global_setting(FUNC_SETTING) == 'yes':
            for case_obj in self._get_functional_cases():
                for i in range(self.tester.re_run_time + 1):
                    ret = self.execute_test_case(case_obj)
                    if ret is False and self.tester.re_run_time:
                        # flush stale session output before the retry,
                        # with a growing timeout per attempt
                        for dutobj in self.duts:
                            dutobj.get_session_output(timeout=0.5 * (i + 1))
                        self.tester.get_session_output(timeout=0.5 * (i + 1))
                        time.sleep(i + 1)
                        self.logger.info(
                            " Test case %s failed and re-run %d time"
                            % (case_obj.__name__, i + 1))
                    else:
                        break
        if load_global_setting(PERF_SETTING) == 'yes':
            for case_obj in self._get_performance_cases():
                self.execute_test_case(case_obj)

    def execute_test_case(self, case_obj):
        """
        Execute test case or enter into debug mode.
        """
        debugger.AliveCase = case_obj.__name__
        if self._debug_case:
            self.logger.info("Rerun Test Case %s Begin" % debugger.AliveCase)
            debugger.keyboard_handle(signal.SIGINT, None)
        else:
            return self._execute_test_case(case_obj)

    def get_result(self):
        """
        Return suite test result
        """
        return self._suite_result

    def get_case_cfg(self):
        """
        Return case based configuration
        """
        return self._case_cfg

    def get_suite_cfg(self):
        """
        Return suite based configuration
        """
        return self._suite_cfg

    def execute_tear_downall(self):
        """
        execute suite tear_down_all function
        """
        try:
            self.tear_down_all()
        except Exception:
            self.logger.error('tear_down_all failed:\n'
                              + traceback.format_exc())
        for dutobj in self.duts:
            dutobj.kill_all()
        self.tester.kill_all()
        for dutobj in self.duts:
            dutobj.virt_exit()
            # destroy all vfs
            dutobj.destroy_all_sriov_vfs()

    def wirespeed(self, nic, frame_size, num_ports):
        """
        Calculate bit rate. It is depended for NICs
        Returns the theoretical packet rate in Mpps for the given frame
        size and port count (20 bytes per frame of inter-frame overhead).
        """
        bitrate = 1000.0  # 1Gb ('.0' forces to operate as float)
        if self.nic == "any" or self.nic == "cfg":
            driver = self._get_nic_driver(self.dut.ports_info[0]['type'])
            nic = get_nic_name(self.dut.ports_info[0]['type'])
        else:
            driver = self._get_nic_driver(self.nic)
            nic = self.nic
        if driver == "ixgbe":
            bitrate *= 10  # 10 Gb NICs
        elif nic == "avoton2c5":
            bitrate *= 2.5  # 2.5 Gb NICs
        elif nic in ["fortville_spirit", "fortville_spirit_single"]:
            bitrate *= 40
        elif nic == 'fortville_eagle':
            bitrate *= 10
        elif nic == 'fortpark_TLV':
            bitrate *= 10
        elif nic == 'redrockcanyou':
            bitrate *= 40
        elif driver == 'thunder-nicvf':
            bitrate *= 10
        return bitrate * num_ports / 8 / (frame_size + 20)
def run_all(config_file, skip_setup, project, suite_dir, base_dir,
            output_dir, dpdk_dir):
    """
    Main process of SPDK tests, it will run all test suites in the config
    file.
    """
    global result
    global log_handler
    # save global variable
    serializer = Serializer()
    # prepare the output folder
    if output_dir == '':
        output_dir = settings.FOLDERS['Output']
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    # add python module search path
    sys.path.append(suite_dir)
    sys.path.append(dpdk_dir)
    logger.log_dir = output_dir
    log_handler = getLogger('spdk')
    log_handler.config_execution('spdk')
    # Read config file
    config = ConfigParser.SafeConfigParser()
    load_cfg = config.read(config_file)
    if len(load_cfg) == 0:
        raise ConfigParseException(config_file)
    os.environ["TERM"] = "dumb"
    # report objects
    result = Result()
    crbInsts = []
    crbs_conf = CrbsConf()
    crbs = crbs_conf.load_crbs_config()
    # for all Execution sections
    for section in config.sections():
        spdk_parse_param(config, section)
        # verify if the delimiter is good if the lists are vertical
        dutIPs, targets, test_suites = spdk_parse_config(config, section)
        for dutIP in dutIPs:
            log_handler.info("\nDUT " + dutIP)
        # look up in crbs - to find the matching IP
        for dutIP in dutIPs:
            for crb in crbs:
                if crb['IP'] == dutIP:
                    crbInsts.append(crb)
                    break
        # only run on the dut in known crbs; otherwise tell the user which
        # config files to fix and skip the section
        if len(crbInsts) == 0:
            cwd = os.path.dirname(os.path.dirname(__file__))
            path1 = cwd + '/framework/execution.cfg'
            path2 = cwd + '/framework/crbs.cfg'
            print " <Target_IP_Address> is", dutIP, "in", path1
            log_handler.error(" SKIP UNKNOWN TARGET")
            if dutIP != '<Target_IP_Address>':
                print " Please check IP Address information in", path1, "and", path2
            continue
        result.dut = dutIPs[0]
        # init dut, tester crb
        duts, tester = spdk_crbs_init(crbInsts, skip_setup, project,
                                      base_dir, serializer, dpdk_dir)
        # register exit action
        atexit.register(quit_execution, duts, tester)
        # Run DUT prerequisites; on failure tear the crbs down and move on
        if spdk_run_prerequisties(duts, tester, serializer) is False:
            spdk_crbs_exit(duts, tester)
            continue
        spdk_run_target(duts, tester, targets, test_suites)
        spdk_crbs_exit(duts, tester)
def run_all(config_file, pkgName, git, patch, skip_setup, read_cache,
            project, suite_dir, test_cases, base_dir, output_dir, verbose,
            debug):
    """
    Main process of DTS, it will run all test suites in the config file.
    """
    global config
    global serializer
    global nic
    global requested_tests
    global result
    global excel_report
    global stats
    global log_handler
    global debug_mode
    # prepare the output folder
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    # add python module search path
    for folder in FOLDERS.values():
        sys.path.append(folder)
    sys.path.append(suite_dir)
    # enable debug mode
    if debug is True:
        debug_mode = True
    # init log_handler handler
    if verbose is True:
        logger.set_verbose()
    logger.log_dir = output_dir
    log_handler = getLogger('dts')
    log_handler.config_execution('dts')
    # run designated test case
    requested_tests = test_cases
    # Read config file
    config = ConfigParser.SafeConfigParser()
    config.read(config_file)
    # register exit action
    atexit.register(close_crb_sessions)
    os.environ["TERM"] = "dumb"
    serializer = Serializer()
    # excel report and statistics file
    result = Result()
    rst.path2Result = output_dir
    excel_report = ExcelReporter(output_dir + '/test_results.xls')
    stats = StatsReporter(output_dir + '/statistics.txt')
    # for all Execution sections
    for section in config.sections():
        dts_parse_param(section)
        # verify if the delimiter is good if the lists are vertical
        dutIP, targets, test_suites, nics = dts_parse_config(section)
        log_handler.info("\nDUT " + dutIP)
        # look up in crbs - to find the matching IP
        # NOTE(review): `crbs` is not defined in this function — presumably
        # a module-level list; verify against the enclosing module.
        crbInst = None
        for crb in crbs:
            if crb['IP'] == dutIP:
                crbInst = crb
                break
        # only run on the dut in known crbs
        if crbInst is None:
            log_handler.error(" SKIP UNKNOWN CRB")
            continue
        result.dut = dutIP
        # init dut, tester crb
        dts_crbs_init(crbInst, skip_setup, read_cache, project, base_dir,
                      nics)
        # Run DUT prerequisites; on failure tear the crbs down and move on
        if dts_run_prerequisties(pkgName, patch) is False:
            dts_crbs_exit()
            continue
        dts_run_target(crbInst, targets, test_suites, nics)
        dts_crbs_exit()
    # persist all collected results after every section has run
    save_all_results()
def run_all(config_file, pkgName, patch, force_setup, read_cache, project,
            suite_dir, test_cases, base_dir, output_dir, verbose, virttype,
            debug, debugcase, re_run, commands, pktgen, test_configs):
    """
    Main process of DTS, it will run all test suites in the config file.
    """
    global requested_tests
    global result
    global excel_report
    global json_report
    global stats_report
    global log_handler
    global check_case_inst
    # save global variable
    serializer = Serializer()
    # load check/support case lists
    check_case_inst = CheckCase()
    # prepare the output folder
    if output_dir == '':
        output_dir = settings.FOLDERS['Output']
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    # add python module search path
    sys.path.append(suite_dir)
    # enable debug mode
    if debug is True:
        settings.save_global_setting(settings.DEBUG_SETTING, 'yes')
    if debugcase is True:
        settings.save_global_setting(settings.DEBUG_CASE_SETTING, 'yes')
    # init log_handler handler
    if verbose is True:
        logger.set_verbose()
    # negative re-run counts are normalized to "no re-run"
    if re_run < 0:
        re_run = 0
    logger.log_dir = output_dir
    log_handler = getLogger('dts')
    log_handler.config_execution('dts')
    # run designated test case
    requested_tests = test_cases
    # Read config file; an alternate config folder may be set globally
    dts_cfg_folder = settings.load_global_setting(settings.DTS_CFG_FOLDER)
    if dts_cfg_folder != '':
        config_file = dts_cfg_folder + os.sep + config_file
    config = ConfigParser.SafeConfigParser()
    load_cfg = config.read(config_file)
    if len(load_cfg) == 0:
        raise ConfigParseException(config_file)
    # parse commands
    dts_commands = dts_parse_commands(commands)
    os.environ["TERM"] = "dumb"
    # change rst output folder
    rst.path2Result = output_dir
    # report objects
    excel_report = ExcelReporter(output_dir + '/test_results.xls')
    json_report = JSONReporter(output_dir + '/test_results.json')
    stats_report = StatsReporter(output_dir + '/statistics.txt')
    result = Result()
    crbInsts = []
    crbs_conf = CrbsConf()
    crbs = crbs_conf.load_crbs_config()
    # for all Execution sections
    for section in config.sections():
        # Skip configuration sections (repo/behaviour settings, not
        # executions)
        if section in ['DPDK', 'Pktgen', 'Tester_DPDK', 'Tester_Pktgen',
                       'latency', 'reset']:
            continue
        dts_parse_param(config, section)
        # verify if the delimiter is good if the lists are vertical
        duts, targets, test_suites = dts_parse_config(config, section)
        # look up in crbs - to find the matching IP
        for dut in duts:
            for crb in crbs:
                if crb['section'] == dut:
                    crbInsts.append(crb)
                    break
        # only run on the dut in known crbs
        if len(crbInsts) == 0:
            log_handler.error(" SKIP UNKNOWN CRB")
            continue
        result.dut = duts[0]
        # init dut, tester crb
        duts, testers = dts_crbs_init(crbInsts, read_cache, project,
                                      base_dir, serializer, virttype,
                                      test_configs)
        for tester in testers:
            tester.set_re_run(re_run)
        # register exit action
        atexit.register(quit_execution, duts, testers)
        check_case_inst.change_dut(duts[0])
        test_configs["force_setup"] = force_setup
        # Check if set-up is installed on all CRBs:
        if force_setup is False:
            setup_ready = True
            dut_dpdk_repo = parse_repo(dict(config.items("DPDK")))
            dut_pktgen_repo = parse_repo(dict(config.items("Pktgen")))
            for dut in duts:
                setup_ready = setup_ready and dut.check_setup(
                    dut_dpdk_repo, dut_pktgen_repo,
                    test_configs["skip_target_env_setup"])
            # testers fall back to the DUT repos when no tester-specific
            # sections are configured
            tester_dpdk_repo = parse_repo(dict(config.items("Tester_DPDK")))\
                if "Tester_DPDK" in config.sections() else dut_dpdk_repo
            tester_pktgen_repo = parse_repo(dict(config.items("Tester_Pktgen")))\
                if "Tester_Pktgen" in config.sections() else dut_pktgen_repo
            for tester in testers:
                setup_ready = setup_ready and tester.check_setup(
                    tester_dpdk_repo, tester_pktgen_repo,
                    test_configs["skip_target_env_setup"])
        else:
            # forced setup always rebuilds the environment
            setup_ready = False
        show_speedup_options_messages(read_cache, setup_ready,
                                      test_configs["try_reuse_pcaps"],
                                      test_cases)
        for tester in testers:
            tester.set_speedup_options(read_cache, setup_ready)
        for dut in duts:
            dut.set_speedup_options(read_cache, setup_ready)
        # Clone DPDK and Pktgen repos and apply patches
        if not setup_ready:
            prepare_repos(config, pkgName, pktgen)
        # Run DUT prerequisites; on failure tear the crbs down and move on
        if dts_run_prerequisties(duts, testers, pkgName, patch, dts_commands,
                                 serializer, pktgen,
                                 test_configs) is False:
            dts_crbs_exit(duts, testers)
            continue
        dts_run_target(duts, testers, targets, test_suites, test_configs)
        dts_crbs_exit(duts, testers)
    # persist all collected results after every section has run
    save_all_results()
class TestCase(object):
    """
    Base class for SPDK test suites: reads the NIC type from
    execution.cfg, records results, and drives case execution. Concrete
    suites subclass this and implement test_* methods plus the
    set_up/tear_down hooks (not defined here).
    """

    def __init__(self, duts, tester, target, suitename):
        self.suite_name = suitename
        self.dut = duts[0]
        self.duts = duts
        self.tester = tester
        self.target = target
        # per-suite logger named after the concrete subclass
        class_name = self.__class__.__name__
        self.logger = getLogger(class_name)
        self.logger.config_suite(class_name)
        self._requested_tests = None
        self.nics = []
        # NOTE(review): `drivername` local is never used afterwards.
        drivername = []
        # read the configured nic_type out of framework/execution.cfg
        execution_path = os.path.dirname(os.path.dirname(__file__))
        execution_file = execution_path + '/framework/execution.cfg'
        # NOTE(review): file handle is never closed — consider `with open`.
        execution = open(execution_file, 'r')
        status = re.findall(r"\n+parameters=nic_type=(.*)", execution.read())
        status_nic = status[0].split(":")
        self.nic = status_nic[0]
        self.kdriver = self._get_nic_driver(self.nic)
        # result object for saving the suite result
        self._suite_result = Result()
        self._suite_result.dut = self.dut.crb['IP']
        self._suite_result.target = target
        self._suite_result.nic = self.nic
        self._suite_result.test_suite = self.suite_name
        # NOTE(review): this check can never fire — Result() was assigned
        # on the line above, so _suite_result is never None here.
        if self._suite_result is None:
            raise ValueError("Result object should not None")
        # load running environment flags ("yes"/"no" strings)
        if load_global_setting(PERF_SETTING) == "yes":
            self._enable_perf = True
        else:
            self._enable_perf = False
        if load_global_setting(FUNC_SETTING) == "yes":
            self._enable_func = True
        else:
            self._enable_func = False
        if load_global_setting(DEBUG_SETTING) == "yes":
            self._enable_debug = True
        else:
            self._enable_debug = False
        if load_global_setting(DEBUG_CASE_SETTING) == "yes":
            self._debug_case = True
        else:
            self._debug_case = False
        self.drivername = load_global_setting(HOST_DRIVER_SETTING)

    def verify(self, passed, description):
        """
        Raise VerifyFailure with *description* when *passed* is falsy.
        """
        if not passed:
            raise VerifyFailure(description)

    def _get_nic_driver(self, nic_name):
        # map a NIC codename to its kernel driver; "Unknown" if unmapped
        if nic_name in DRIVERS.keys():
            return DRIVERS[nic_name]
        return "Unknown"

    def result_table_create(self, header):
        # start a fresh result table wired to the logger
        self._result_table = ResultTable(header)
        self._result_table.set_logger(self.logger)

    def result_table_add(self, row):
        self._result_table.add_row(row)

    def result_table_print(self):
        self._result_table.table_print()

    def result_table_getrows(self):
        return self._result_table.results_table_rows

    def _get_functional_cases(self):
        """
        Get all functional test cases.
        """
        return self._get_test_cases(r'test_(?!perf_)')

    def _get_performance_cases(self):
        """
        Get all performance test cases.
        """
        return self._get_test_cases(r'test_perf_')

    def _has_it_been_requested(self, test_case, test_name_regex):
        """
        Check whether test case has been requested for validation.
        """
        name_matches = re.match(test_name_regex, test_case.__name__)
        if self._requested_tests is not None:
            return name_matches and test_case.__name__ in self._requested_tests
        return name_matches

    def set_requested_cases(self, case_list):
        """
        Pass down input cases list for check
        """
        self._requested_tests = case_list

    def _get_test_cases(self, test_name_regex):
        """
        Return case list which name matched regex.
        """
        for test_case_name in dir(self):
            test_case = getattr(self, test_case_name)
            if callable(test_case) and self._has_it_been_requested(
                    test_case, test_name_regex):
                yield test_case

    def execute_setup_all(self):
        """
        Execute suite setup_all function before cases.
        Returns True on success; on failure marks enabled cases blocked
        and returns False.
        """
        # clear all previous output
        for dutobj in self.duts:
            dutobj.get_session_output(timeout=0.1)
        self.tester.get_session_output(timeout=0.1)
        try:
            self.set_up_all()
            return True
        except Exception:
            self.logger.error('set_up_all failed:\n' + traceback.format_exc())
            if self._enable_func:
                i = 0
                for case_obj in self._get_functional_cases():
                    case_name = case_obj.__name__
                    # backend suites record cases as a list and only for
                    # test_a_* names
                    if self._suite_result.test_suite in is_backend:
                        self._suite_result.test_case = []
                        if re.findall(r'test_a_', case_name):
                            out = re.findall(r'(.*)', case_name)
                            # NOTE(review): indexing `out[i]` with a growing
                            # i looks suspect (out[0] is the full name);
                            # confirm intended behavior.
                            self._suite_result.test_case.append(out[i])
                            i = i + 1
                        else:
                            return True
                    else:
                        self._suite_result.test_case = case_obj.__name__
                    self._suite_result.test_case_blocked('set_up_all failed')
            if self._enable_perf:
                for case_obj in self._get_performance_cases():
                    self._suite_result.test_case = case_obj.__name__
                    self._suite_result.test_case_blocked('set_up_all failed')
            return False

    def _execute_test_case(self, case_obj):
        """
        Execute specified test case in specified suite. If any exception
        occured in validation process, save the result and tear down this
        case. Results are saved to a per-suite excel report after every
        outcome.
        """
        global excel_report
        case_name = case_obj.__name__
        if self._suite_result.test_suite in is_backend:
            self._suite_result.test_case = []
            i = 0
            if re.findall(r'test_a_', case_name):
                out = re.findall(r'(.*)', case_name)
                self._suite_result.test_case.append(out[i])
                i = i + 1
            else:
                # non test_a_* cases are skipped for backend suites
                return True
        else:
            self._suite_result.test_case = case_obj.__name__
        excel_report = ExcelReporter(
            '../output/test_results_%s.xls' % self._suite_result.test_suite)
        try:
            self.logger.info('Test Case %s Begin' % case_name)
            self.running_case = case_name
            # clean session
            for dutobj in self.duts:
                dutobj.get_session_output(timeout=0.1)
            self.tester.get_session_output(timeout=0.1)
            # run set_up function for each case
            self.set_up()
            case_obj()
            self._suite_result.test_case_passed()
            excel_report.save(self._suite_result)
            self.logger.info('Test Case %s Result PASSED:' % case_name)
        except VerifyFailure as v:
            self._suite_result.test_case_failed(str(v))
            excel_report.save(self._suite_result)
            self.logger.error('Test Case %s Result FAILED: ' % (case_name)
                              + str(v))
        except KeyboardInterrupt:
            # user abort: record as blocked and stop the whole run
            self._suite_result.test_case_blocked("Skipped")
            excel_report.save(self._suite_result)
            self.logger.error('Test Case %s SKIPED: ' % (case_name))
            self.tear_down()
            raise KeyboardInterrupt("Stop SPDK")
        except TimeoutException as e:
            msg = str(e)
            self._suite_result.test_case_failed(msg)
            excel_report.save(self._suite_result)
            self.logger.error('Test Case %s Result FAILED: ' % (case_name)
                              + msg)
            self.logger.error('%s' % (e.get_output()))
        except Exception:
            trace = traceback.format_exc()
            self._suite_result.test_case_failed(trace)
            excel_report.save(self._suite_result)
            self.logger.error('Test Case %s Result ERROR: ' % (case_name)
                              + trace)
        finally:
            self.tear_down()

    def execute_test_cases(self):
        """
        Execute all test cases in one suite.
        """
        if load_global_setting(FUNC_SETTING) == 'yes':
            for case_obj in self._get_functional_cases():
                self._execute_test_case(case_obj)
        if load_global_setting(PERF_SETTING) == 'yes':
            for case_obj in self._get_performance_cases():
                self._execute_test_case(case_obj)

    def get_result(self):
        # return the suite test result object
        return self._suite_result

    def execute_tear_downall(self):
        # no suite-level teardown in the SPDK variant
        pass