def do_parallel_task(self): """ Do configured tasks in parallel, will return if all tasks finished """ # set parallel mode save_global_setting(DTS_PARALLEL_SETTING, 'yes') self.pool_result = [dict() for _ in self.duts] for req in self._pool_requests: self.pool.putRequest(req) self.logger.info("All parallel tasks start at %s" % time.ctime()) # clean the request queue self._pool_requests = list() while True: try: time.sleep(0.5) self.pool.poll() except threadpool.NoResultsPending: self.logger.info("All parallel tasks have been done at %s" % time.ctime()) break except Exception as e: self.logger.error("Met exception %s" % (str(e))) break # clear pool related queues, clean thread self.pool._requests_queue.queue.clear() self.pool._results_queue.queue.clear() time.sleep(2) # exit from parallel mode save_global_setting(DTS_PARALLEL_SETTING, 'no')
def dts_parse_config(config, section):
    """
    Parse execution file configuration.

    Reads the comma-separated 'crbs', 'targets' and 'test_suites' options
    from *section*, stores the optional 'rx_mode' option into the global
    DPDK_RXMODE_SETTING ('default' when absent), and returns the tuple
    (duts, targets, test_suites) with whitespace stripped and empty suite
    names removed.
    """
    duts = [dut_.strip() for dut_ in config.get(section, 'crbs').split(',')]
    targets = [
        target.strip()
        for target in config.get(section, 'targets').split(',')
    ]
    test_suites = [
        suite.strip()
        for suite in config.get(section, 'test_suites').split(',')
    ]

    try:
        rx_mode = config.get(section, 'rx_mode').strip()
    except Exception:
        # 'rx_mode' is optional; fall back to the default mode.
        # (narrowed from a bare `except:` which would also swallow
        # KeyboardInterrupt/SystemExit)
        rx_mode = 'default'
    settings.save_global_setting(settings.DPDK_RXMODE_SETTING, rx_mode)

    # Drop empty suite names (e.g. from trailing commas).  The previous
    # code removed items while iterating the same list, which skips the
    # element following each removal and so leaves consecutive empty
    # entries behind.
    test_suites = [suite for suite in test_suites if suite != '']

    return duts, targets, test_suites
def dts_parse_param(config, section):
    """
    Parse execution file parameters.

    Reads the 'parameters' and 'drivername' options of *section* and
    publishes the host driver (and optional driver mode), NIC type and
    perf/func switches into the global settings store.
    """
    # Colon-separated key=value pairs, e.g. "nic_type=niantic:func=true"
    parameters = config.get(section, 'parameters').split(':')
    drivername = config.get(section, 'drivername').split('=')[-1]

    # "driver:mode" selects an explicit driver mode; a bare name does not
    driver = drivername.split(':')
    if len(driver) == 2:
        drivername, drivermode = driver[0], driver[1]
        settings.save_global_setting(
            settings.HOST_DRIVER_MODE_SETTING, drivermode)
    else:
        drivername = driver[0]

    settings.save_global_setting(settings.HOST_DRIVER_SETTING, drivername)

    paramDict = {}
    for entry in parameters:
        key, _, value = entry.partition('=')
        paramDict[key] = value

    # perf/func are enabled only by the literal string 'true'
    performance = paramDict.get('perf') == 'true'
    functional = paramDict.get('func') == 'true'

    # default to matching any NIC when the execution file does not say
    paramDict.setdefault('nic_type', 'any')
    settings.save_global_setting(
        settings.HOST_NIC_SETTING, paramDict['nic_type'])

    # save perf/function setting in environment
    settings.save_global_setting(
        settings.PERF_SETTING, 'yes' if performance else 'no')
    settings.save_global_setting(
        settings.FUNC_SETTING, 'yes' if functional else 'no')
def run_all(config_file, pkgName, git, patch, skip_setup,
            read_cache, project, suite_dir, test_cases,
            base_dir, output_dir, verbose, virttype, debug,
            debugcase, re_run, commands):
    """
    Main process of DTS, it will run all test suites in the config file.

    Sets up output folders, logging, reporters and global flags, then for
    every execution section of the config file initializes the matching
    CRBs, runs prerequisites and the configured test suites, and finally
    saves all results.  Mutates several module-level globals (reporters,
    result, log_handler, ...).
    """
    global requested_tests
    global result
    global excel_report
    global json_report
    global stats_report
    global log_handler
    global check_case_inst

    # save global variable
    serializer = Serializer()

    # load check/support case lists
    check_case_inst = CheckCase()

    # prepare the output folder
    if output_dir == '':
        output_dir = settings.FOLDERS['Output']

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    # add external library
    # NOTE(review): derives the repo root by stripping 'framework/dts.py'
    # from this file's path — breaks if the file is ever moved/renamed
    exec_file = os.path.realpath(__file__)
    extra_libs_path = exec_file.replace('framework/dts.py', '') + 'extra_libs'
    sys.path.insert(1, extra_libs_path)

    # add python module search path
    sys.path.append(suite_dir)

    # enable debug mode
    if debug is True:
        settings.save_global_setting(settings.DEBUG_SETTING, 'yes')
    if debugcase is True:
        settings.save_global_setting(settings.DEBUG_CASE_SETTING, 'yes')

    # init log_handler handler
    if verbose is True:
        logger.set_verbose()

    # negative re-run counts make no sense; clamp to zero
    if re_run < 0:
        re_run = 0

    logger.log_dir = output_dir
    log_handler = getLogger('dts')
    log_handler.config_execution('dts')

    # run designated test case
    requested_tests = test_cases

    # Read config file
    dts_cfg_folder = settings.load_global_setting(settings.DTS_CFG_FOLDER)
    if dts_cfg_folder != '':
        config_file = dts_cfg_folder + os.sep + config_file

    config = ConfigParser.SafeConfigParser()
    # config.read returns the list of files it could parse; empty means
    # the execution file was missing or unreadable
    load_cfg = config.read(config_file)
    if len(load_cfg) == 0:
        raise ConfigParseException(config_file)

    # parse commands
    dts_commands = dts_parse_commands(commands)

    os.environ["TERM"] = "dumb"

    # change rst output folder
    rst.path2Result = output_dir

    # report objects
    excel_report = ExcelReporter(output_dir + '/test_results.xls')
    json_report = JSONReporter(output_dir + '/test_results.json')
    stats_report = StatsReporter(output_dir + '/statistics.txt')
    result = Result()

    crbInsts = []
    crbs_conf = CrbsConf()
    crbs = crbs_conf.load_crbs_config()

    # for all Execution sections
    for section in config.sections():
        dts_parse_param(config, section)

        # verify if the delimiter is good if the lists are vertical
        duts, targets, test_suites = dts_parse_config(config, section)
        for dut in duts:
            log_handler.info("\nDUT " + dut)

        # look up in crbs - to find the matching IP
        # NOTE(review): crbInsts is never cleared between sections, so
        # matches accumulate across sections — confirm this is intended
        for dut in duts:
            for crb in crbs:
                if crb['section'] == dut:
                    crbInsts.append(crb)
                    break

        # only run on the dut in known crbs
        if len(crbInsts) == 0:
            log_handler.error(" SKIP UNKNOWN CRB")
            continue

        result.dut = duts[0]

        # init global lock
        create_parallel_locks(len(duts))

        # init dut, tester crb; rebinds `duts` to initialized objects
        duts, tester = dts_crbs_init(
            crbInsts, skip_setup, read_cache, project, base_dir,
            serializer, virttype)
        tester.set_re_run(re_run)

        # register exit action
        atexit.register(quit_execution, duts, tester)

        check_case_inst.check_dut(duts[0])

        # Run DUT prerequisites; on failure tear down this section's CRBs
        # and move on to the next section
        if dts_run_prerequisties(duts, tester, pkgName, patch,
                                 dts_commands, serializer) is False:
            dts_crbs_exit(duts, tester)
            continue

        dts_run_target(duts, tester, targets, test_suites)

        dts_crbs_exit(duts, tester)

    save_all_results()
def spdk_parse_param(config, section):
    """
    Parse execution file parameters.

    Reads the 'parameters' and 'drivername' options of *section* and
    publishes the host driver, NIC type and perf/func switches into the
    global settings store.
    """
    # Colon-separated key=value pairs from the execution file
    raw_params = config.get(section, 'parameters').split(':')
    drivername = config.get(section, 'drivername').split('=')[-1]
    settings.save_global_setting(settings.HOST_DRIVER_SETTING, drivername)

    paramDict = {}
    for entry in raw_params:
        key, _, value = entry.partition('=')
        paramDict[key] = value

    # perf/func are enabled only by the literal string 'true'
    performance = paramDict.get('perf') == 'true'
    functional = paramDict.get('func') == 'true'

    # default to matching any NIC when the execution file does not say
    paramDict.setdefault('nic_type', 'any')
    settings.save_global_setting(
        settings.HOST_NIC_SETTING, paramDict['nic_type'])

    settings.save_global_setting(
        settings.PERF_SETTING, 'yes' if performance else 'no')
    settings.save_global_setting(
        settings.FUNC_SETTING, 'yes' if functional else 'no')
def run_all(config_file, pkgName, patch, force_setup,
            read_cache, project, suite_dir, test_cases,
            base_dir, output_dir, verbose, virttype, debug,
            debugcase, re_run, commands, pktgen, test_configs):
    """
    Main process of DTS, it will run all test suites in the config file.

    Pktgen-aware variant: sets up output folders, logging, reporters and
    global flags, then for every execution section (skipping the
    repo/configuration sections) initializes DUTs and testers, checks or
    prepares the DPDK/Pktgen setup, runs prerequisites and the configured
    test suites, and finally saves all results.  Mutates several
    module-level globals (reporters, result, log_handler, ...).
    """
    global requested_tests
    global result
    global excel_report
    global json_report
    global stats_report
    global log_handler
    global check_case_inst

    # save global variable
    serializer = Serializer()

    # load check/support case lists
    check_case_inst = CheckCase()

    # prepare the output folder
    if output_dir == '':
        output_dir = settings.FOLDERS['Output']

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    # add python module search path
    sys.path.append(suite_dir)

    # enable debug mode
    if debug is True:
        settings.save_global_setting(settings.DEBUG_SETTING, 'yes')
    if debugcase is True:
        settings.save_global_setting(settings.DEBUG_CASE_SETTING, 'yes')

    # init log_handler handler
    if verbose is True:
        logger.set_verbose()

    # negative re-run counts make no sense; clamp to zero
    if re_run < 0:
        re_run = 0

    logger.log_dir = output_dir
    log_handler = getLogger('dts')
    log_handler.config_execution('dts')

    # run designated test case
    requested_tests = test_cases

    # Read config file
    dts_cfg_folder = settings.load_global_setting(settings.DTS_CFG_FOLDER)
    if dts_cfg_folder != '':
        config_file = dts_cfg_folder + os.sep + config_file

    config = ConfigParser.SafeConfigParser()
    # config.read returns the list of files it could parse; empty means
    # the execution file was missing or unreadable
    load_cfg = config.read(config_file)
    if len(load_cfg) == 0:
        raise ConfigParseException(config_file)

    # parse commands
    dts_commands = dts_parse_commands(commands)

    os.environ["TERM"] = "dumb"

    # change rst output folder
    rst.path2Result = output_dir

    # report objects
    excel_report = ExcelReporter(output_dir + '/test_results.xls')
    json_report = JSONReporter(output_dir + '/test_results.json')
    stats_report = StatsReporter(output_dir + '/statistics.txt')
    result = Result()

    crbInsts = []
    crbs_conf = CrbsConf()
    crbs = crbs_conf.load_crbs_config()

    # for all Execution sections
    for section in config.sections():
        # Skip configuration sections if
        # they describe repos/settings rather than an execution
        if section in ['DPDK', 'Pktgen', 'Tester_DPDK', 'Tester_Pktgen',
                       'latency', 'reset']:
            continue

        dts_parse_param(config, section)

        # verify if the delimiter is good if the lists are vertical
        duts, targets, test_suites = dts_parse_config(config, section)

        # look up in crbs - to find the matching IP
        # NOTE(review): crbInsts is never cleared between sections, so
        # matches accumulate across sections — confirm this is intended
        for dut in duts:
            for crb in crbs:
                if crb['section'] == dut:
                    crbInsts.append(crb)
                    break

        # only run on the dut in known crbs
        if len(crbInsts) == 0:
            log_handler.error(" SKIP UNKNOWN CRB")
            continue

        result.dut = duts[0]

        # init dut, tester crb; rebinds `duts` to initialized objects
        duts, testers = dts_crbs_init(
            crbInsts, read_cache, project, base_dir, serializer,
            virttype, test_configs)
        for tester in testers:
            tester.set_re_run(re_run)

        # register exit action
        atexit.register(quit_execution, duts, testers)

        check_case_inst.change_dut(duts[0])

        test_configs["force_setup"] = force_setup
        # Check if set-up is installed on all CRBs:
        if force_setup is False:
            setup_ready = True
            dut_dpdk_repo = parse_repo(dict(config.items("DPDK")))
            dut_pktgen_repo = parse_repo(dict(config.items("Pktgen")))
            # every DUT must already have the expected repos installed
            for dut in duts:
                setup_ready = setup_ready and dut.check_setup(
                    dut_dpdk_repo, dut_pktgen_repo,
                    test_configs["skip_target_env_setup"])
            # testers fall back to the DUT repos when no tester-specific
            # sections exist in the config
            tester_dpdk_repo = parse_repo(dict(config.items("Tester_DPDK")))\
                if "Tester_DPDK" in config.sections() else dut_dpdk_repo
            tester_pktgen_repo = parse_repo(dict(config.items("Tester_Pktgen")))\
                if "Tester_Pktgen" in config.sections() else dut_pktgen_repo
            for tester in testers:
                setup_ready = setup_ready and tester.check_setup(
                    tester_dpdk_repo, tester_pktgen_repo,
                    test_configs["skip_target_env_setup"])
        else:
            # forced setup always rebuilds the environment
            setup_ready = False

        show_speedup_options_messages(read_cache, setup_ready,
                                      test_configs["try_reuse_pcaps"],
                                      test_cases)
        for tester in testers:
            tester.set_speedup_options(read_cache, setup_ready)
        for dut in duts:
            dut.set_speedup_options(read_cache, setup_ready)

        # Clone DPDK and Pktgen repos and apply patches
        if not setup_ready:
            prepare_repos(config, pkgName, pktgen)

        # Run DUT prerequisites; on failure tear down this section's CRBs
        # and move on to the next section
        if dts_run_prerequisties(duts, testers, pkgName, patch,
                                 dts_commands, serializer, pktgen,
                                 test_configs) is False:
            dts_crbs_exit(duts, testers)
            continue

        dts_run_target(duts, testers, targets, test_suites, test_configs)

        dts_crbs_exit(duts, testers)

    save_all_results()