def __split_xml_to_set(self, webapi_file):
    """Split a test-definition XML file into one file per <set>.

    For every <set> found under the <suite> elements of *webapi_file*,
    a full copy of the file is created alongside it (named
    ``<base>_set_<n>.xml``).  Each copy is then pruned so that it keeps
    exactly one <set>; copies whose surviving <set> holds no <testcase>
    are dropped again.

    :param webapi_file: path of the XML file to split; it is removed
        from ``self.resultfiles`` and the per-set copies are added.
    :return: list of per-set XML file paths, in original <set> order.
    """
    LOGGER.debug("[ split xml: %s by <set> ]" % webapi_file)
    LOGGER.debug("[ this might take some time, please wait ]")
    set_number = 1
    test_xml_set_list = []
    self.resultfiles.discard(webapi_file)
    test_xml_temp = etree.parse(webapi_file)
    # Pass 1: make one verbatim copy of the whole file per <set>.
    for test_xml_temp_suite in test_xml_temp.getiterator('suite'):
        while set_number <= len(test_xml_temp_suite.getiterator('set')):
            copy_url = os.path.splitext(webapi_file)[0]
            copy_url += "_set_%s.xml" % set_number
            copyfile(webapi_file, copy_url)
            test_xml_set_list.append(copy_url)
            self.resultfiles.add(copy_url)
            set_number += 1
        # NOTE(review): presumably gives the copies time to land on
        # slow storage before they are re-parsed — TODO confirm the
        # sleep is still needed.
        time.sleep(3)
    # After the loop set_number is <total sets> + 1; normalize it to
    # the total count — it doubles as the index of the set each copy
    # must keep during pass 2 below.
    set_number -= 1
    LOGGER.info("[ total set number is: %s ]" % set_number)
    # only keep one set in each xml file and remove empty set
    test_xml_set_list_empty = []
    # Process the copies in reverse so the first file handled keeps the
    # last set; set_number is decremented once per file to match.
    if len(test_xml_set_list) > 1:
        test_xml_set_list.reverse()
    for test_xml_set in test_xml_set_list:
        test_xml_set_tmp = etree.parse(test_xml_set)
        set_keep_number = 1
        # LOGGER.debug("[ process set: %s ]" % test_xml_set)
        for temp_suite in test_xml_set_tmp.getiterator('suite'):
            for test_xml_set_temp_set in temp_suite.getiterator('set'):
                if set_keep_number != set_number:
                    # Not the set this copy is responsible for: drop it.
                    temp_suite.remove(test_xml_set_temp_set)
                else:
                    # The kept set is useless if it has no test cases.
                    if not test_xml_set_temp_set.getiterator('testcase'):
                        test_xml_set_list_empty.append(test_xml_set)
                set_keep_number += 1
        set_number -= 1
        test_xml_set_tmp.write(test_xml_set)
    # Discard the copies whose kept set turned out to be empty.
    for empty_set in test_xml_set_list_empty:
        LOGGER.debug("[ remove empty set: %s ]" % empty_set)
        test_xml_set_list.remove(empty_set)
        self.resultfiles.discard(empty_set)
    # Restore the original (ascending) set order before returning.
    if len(test_xml_set_list) > 1:
        test_xml_set_list.reverse()
    return test_xml_set_list
def __prepare_external_test_json(self, resultfile):
    """Build the JSON-style parameter dict for an external test run.

    Parses the per-set XML file *resultfile*, extracts every <testcase>
    (entry script, timeout, expected result, steps, pre/post conditions,
    onload delay, performance measurements) into plain dicts, and stores
    the assembled structure in ``self.set_parameters``.

    :param resultfile: path of the single-set XML file to read.
    :return: True on success, False when the file cannot be read/parsed.
    """
    parameters = {}
    xml_set_tmp = resultfile
    # split set_xml by <case> get case parameters
    LOGGER.debug("[ split xml: %s by <case> ]" % xml_set_tmp)
    LOGGER.debug("[ this might take some time, please wait ]")
    try:
        parse_tree = etree.parse(xml_set_tmp)
        root_em = parse_tree.getroot()
        case_tmp = []
        for tset in root_em.getiterator('set'):
            case_order = 1
            parameters.setdefault(
                "casecount", str(len(tset.getiterator('testcase')))
            )
            parameters.setdefault("current_set_name", xml_set_tmp)
            for tcase in tset.getiterator('testcase'):
                case_detail_tmp = {}
                step_tmp = []
                parameters.setdefault(
                    "exetype", tcase.get('execution_type')
                )
                parameters.setdefault("type", tcase.get('type'))
                case_detail_tmp.setdefault("case_id", tcase.get('id'))
                case_detail_tmp.setdefault("purpose", tcase.get('purpose'))
                case_detail_tmp.setdefault("order", str(case_order))
                case_detail_tmp.setdefault("onload_delay", "3")
                # Hoist the repeated find() of the entry node.
                entry_node = tcase.find('description/test_script_entry')
                if entry_node is not None:
                    tc_entry = entry_node.text
                    if not tc_entry:
                        tc_entry = ""
                    case_detail_tmp["entry"] = self.test_prefix + tc_entry
                    if entry_node.get('timeout'):
                        case_detail_tmp["timeout"] = entry_node.get('timeout')
                    if entry_node.get('test_script_expected_result'):
                        case_detail_tmp["expected_result"] = entry_node.get(
                            'test_script_expected_result')
                for this_step in tcase.getiterator("step"):
                    step_detail_tmp = {}
                    step_detail_tmp.setdefault("order", "1")
                    # Bug fix: only override the "1" default when the
                    # attribute is present; the old unconditional
                    # assignment produced the string "None" for steps
                    # without an 'order' attribute.
                    step_order = this_step.get('order')
                    if step_order is not None:
                        step_detail_tmp["order"] = str(step_order)
                    if this_step.find("step_desc") is not None:
                        text = this_step.find("step_desc").text
                        if text is not None:
                            step_detail_tmp["step_desc"] = text
                    if this_step.find("expected") is not None:
                        text = this_step.find("expected").text
                        if text is not None:
                            step_detail_tmp["expected"] = text
                    step_tmp.append(step_detail_tmp)
                case_detail_tmp['steps'] = step_tmp
                if tcase.find('description/pre_condition') is not None:
                    text = tcase.find('description/pre_condition').text
                    if text is not None:
                        case_detail_tmp["pre_condition"] = text
                if tcase.find('description/post_condition') is not None:
                    text = tcase.find('description/post_condition').text
                    if text is not None:
                        case_detail_tmp['post_condition'] = text
                if tcase.get('onload_delay') is not None:
                    case_detail_tmp[
                        'onload_delay'] = tcase.get('onload_delay')
                # Check performance test
                if tcase.find('measurement') is not None:
                    measures_array = []
                    for measure in tcase.getiterator('measurement'):
                        measure_json = {}
                        measure_json['name'] = measure.get('name')
                        measure_json['file'] = measure.get('file')
                        measures_array.append(measure_json)
                    case_detail_tmp['measures'] = measures_array
                case_tmp.append(case_detail_tmp)
                case_order += 1
        parameters.setdefault("cases", case_tmp)
        if self.bdryrun:
            parameters.setdefault("dryrun", True)
        self.set_parameters = parameters
    # Bug fix: also catch XML parse failures.  ParseError is a
    # SyntaxError subclass in both xml.etree and lxml, so a malformed
    # set file now yields the documented False return instead of an
    # unhandled exception.
    except (IOError, SyntaxError) as error:
        LOGGER.error("[ Error: fail to prepare cases parameters, "
                     "error: %s ]\n" % error)
        return False
    return True
def merge_resultfile(self, start_time, latest_dir):
    """Merge all per-set result XML files into a single report.

    Builds ``tests.<unique>.result.xml`` inside *latest_dir* containing
    every non-empty <suite> from ``self.resultfiles``, prefixed with an
    environment node, a summary node, and an XSL stylesheet reference.
    If ``self.resultfile`` names an ``.xml`` path, the merged report is
    also copied there (parent directories are created on demand).

    :param start_time: run start timestamp, forwarded to get_summary().
    :param latest_dir: directory receiving the merged result file.
    """
    # NOTE(review): mktemp only reserves a unique *name* (race-prone by
    # design); here it is used purely to derive a unique basename, and
    # the file is created later by open() below.
    mergefile = mktemp(suffix='.xml', prefix='tests.', dir=latest_dir)
    # Strip both extensions (".result"/".xml" style) down to the stem,
    # then rebuild the canonical "<stem>.result.xml" name in latest_dir.
    mergefile = os.path.splitext(mergefile)[0]
    mergefile = os.path.splitext(mergefile)[0]
    mergefile = "%s.result" % BASENAME(mergefile)
    mergefile = "%s.xml" % mergefile
    mergefile = JOIN(latest_dir, mergefile)
    end_time = datetime.today().strftime("%Y-%m-%d_%H_%M_%S")
    LOGGER.info("\n[ test complete at time: %s ]" % end_time)
    LOGGER.debug("[ start merging test result xml files, "\
        "this might take some time, please wait ]")
    LOGGER.debug("[ merge result files into %s ]" % mergefile)
    root = etree.Element('test_definition')
    root.tail = "\n"
    totals = set()
    # merge result files
    resultfiles = self.resultfiles
    totals = self.__merge_result(resultfiles, totals)
    # Append every suite that still contains test cases to the merged
    # document; empty suites are silently skipped.
    for total in totals:
        result_xml = etree.parse(total)
        for suite in result_xml.getiterator('suite'):
            if suite.getiterator('testcase'):
                suite.tail = "\n"
                root.append(suite)
    # print test summary
    self.__print_summary()
    # generate actual xml file
    LOGGER.info("[ generate result xml: %s ]" % mergefile)
    if self.skip_all_manual:
        LOGGER.info("[ some results of core manual cases are N/A,"
                    "please refer to the above result file ]")
    LOGGER.info("[ merge complete, write to the result file,"
                " this might take some time, please wait ]")
    # get useful info for xml
    # add environment node
    # add summary node
    root.insert(0, get_summary(start_time, end_time))
    root.insert(0, self.__get_environment())
    # add XSL support to testkit-lite
    # Hand-written declaration so the stylesheet PI precedes the root
    # element; tree.write() below is told not to emit its own.
    declaration_text = """<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="testresult.xsl"?>\n"""
    try:
        with open(mergefile, 'w') as output:
            output.write(declaration_text)
            tree = etree.ElementTree(element=root)
            # NOTE(review): writing with encoding='utf-8' into a
            # text-mode handle — fine on Python 2; would need 'wb' or
            # encoding='unicode' on Python 3. Confirm target runtime.
            tree.write(output, xml_declaration=False, encoding='utf-8')
    except IOError as error:
        LOGGER.error(
            "[ Error: merge result file failed, error: %s ]" % error)
    # Presumably restores CDATA sections that serialization escaped —
    # see replace_cdata's definition for the exact rewrite.
    replace_cdata(mergefile)
    # copy result to -o option
    try:
        if self.resultfile:
            if os.path.splitext(self.resultfile)[-1] == '.xml':
                if not os.path.exists(os.path.dirname(self.resultfile)):
                    if len(os.path.dirname(self.resultfile)) > 0:
                        # NOTE(review): makedirs raises OSError, which
                        # this except IOError only covers on Python 2
                        # (where they are unified via EnvironmentError)
                        # — confirm if Python 3 support is needed.
                        os.makedirs(os.path.dirname(self.resultfile))
                LOGGER.info("[ copy result xml to output file:"
                            " %s ]" % self.resultfile)
                copyfile(mergefile, self.resultfile)
            else:
                LOGGER.info(
                    "[ Please specify and xml file for result output,"
                    " not:%s ]" % self.resultfile)
    except IOError as error:
        LOGGER.error("[ Error: fail to copy the result file to: %s,"
                     " please check if you have created its parent directory,"
                     " error: %s ]" % (self.resultfile, error))