def create_application_documentation(self, report_file=None, output_file_name="demo.docx"):
    """
    Create a document (docx) object and read first all ".feature" files and add
    their contents into the document.

    If a report file name is provided, it will add a "last execution" section
    containing the data found in the report. The report file must be the
    "plain" report output file generated from behave.

    :param report_file: The report file path (absolute or relative)
    :param output_file_name: The exported file name, by default "demo.docx".
        A ".docx" suffix is appended when missing so the export is always a
        valid Word document name (consistent with the extended builder).
    :return: None
    """
    self.__document = Document()
    # Document title page
    self.document.add_heading("{}".format(self.__report_title), 0)
    self.document.add_page_break()
    # Use the iterator as it's cleaner (keeps memory flat on big trees)
    for file in glob.iglob("{}/**/*.feature".format(self.__feature_repository),
                           recursive=True):
        # Use the Behave parser in order to read the feature file
        test = parse_file(file)
        self.add_heading(feature=test)
        self.add_description(feature=test)
        self.add_background(feature=test)
        self.add_scenario(feature=test)
        self.document.add_page_break()
    if report_file is not None:
        self.add_report(file=report_file)
    # Consistency fix: always export with a .docx extension, matching the
    # behavior of the extended documentation builder in this project.
    if not output_file_name.endswith(".docx"):
        output_file_name = "{}.docx".format(output_file_name)
    self.document.save(output_file_name)
def get(_id):
    """Load the scenario identified by *_id* from its feature file.

    The feature file path is resolved from the id, parsed with behave,
    then serialized back to text. The returned Scenario carries the id
    derived from the file name (stem before the first dot) and that text.
    """
    path = Scenario._get_feature_file_name(_id)
    parsed = behave_parser.parse_file(path)
    source_text = Scenario._feature_to_string(parsed)
    scenario_id = os.path.basename(path).split('.')[0]
    return Scenario(scenario_id, source_text)
def get_feature(self, feature_file=None):
    """Parse a .feature file into the ``self.feature`` dictionary.

    :param feature_file: path of the .feature file to parse (required
        despite the ``None`` default; an assert rejects a missing value).
    :return: 0 on success; 1 when a scenario carries fewer than two jira
        ids; or the ``feature_file`` path itself when tag extraction fails
        with AttributeError (malformed feature).
    """
    # Parse a feature file and convert it in a dictionary (self.feature)
    assert feature_file is not None, "Get feature - Missing file {}".format(
        feature_file)
    feature = parse_file(
        feature_file)  # parse feature file into a feature object
    # get all feature's story / improvement
    try:
        story_tags = UpdateFeatureOnJira.return_jira_id_from_list(
            feature.tags)
    except AttributeError:
        log.warning("Error in feature file: {}".format(feature_file))
        return feature_file
    self.feature = {
        "description": UpdateFeatureOnJira.add_description(feature),
        "story_tags": ', '.join(story_tags),  # could get multiple jira ids
        "scenarios": []
    }
    # manage case where background/precondition is empty
    if feature.background is not None:
        # when background / precondition exist
        self.feature["precondition"] = feature.background.name[
            1:]  # remove @ at the beginning
    else:
        self.feature["precondition"] = None
    # TODO check background VS jira precondition
    for scenario in feature.scenarios:
        # get scenario data
        # get the scenario jira id
        scenario_id = UpdateFeatureOnJira.return_jira_id_from_list(
            scenario.tags)
        # get all labels without jira ids
        tags = scenario.effective_tags
        jira_ids = UpdateFeatureOnJira.return_jira_id_from_list(
            scenario.effective_tags)
        if len(jira_ids) < 2:
            # NOTE(review): effective_tags merges feature + scenario tags,
            # so "< 2" presumably means the scenario has no jira id of its
            # own on top of the feature-level one — confirm with callers.
            log.debug(
                "There is no jira's id in feature in file: {}".format(
                    feature_file))
            return 1
        for item in jira_ids:
            # remove jira ids from tags
            # NOTE(review): this mutates the list returned by
            # scenario.effective_tags in place — assumed to be a fresh
            # list; verify it does not alias scenario state.
            tags.remove(item)
        my_scenario = {
            "scenario_id": str(scenario_id[0]),  # could only get 1 jira id
            "labels": tags,
            "type": scenario.keyword,
            "title": "{} - {}".format(feature.name, scenario.name),
            "scenario": UpdateFeatureOnJira.add_scenario(scenario.steps)
        }
        if scenario.keyword == 'Scenario Outline':
            # When Scenario Outline --> there are examples to add
            examples = "\n".join(
                UpdateFeatureOnJira.add_examples(scenario.examples))
            my_scenario["scenario"] += "\n\n{}".format(examples)
        self.feature["scenarios"].append(my_scenario)
    return 0
def _read_single_file(self, file_path: str):
    """Parse one .feature file and queue its scenarios into file_contents.

    Feature-level tags are folded into every scenario's tag list before
    queuing.

    :raises LookupError: when the file contains no scenarios.
    """
    handler = FilesHandler(self.config, file_path)
    parsed_feature = pr.parse_file(file_path)
    folder_name, link = handler.get_bdd_file_paths()
    if not parsed_feature.scenarios:
        raise LookupError('no scenarios found in .feature file')
    if parsed_feature.background:
        precondition = parsed_feature.background.name
    else:
        precondition = ''
    objective = parsed_feature.name
    for scenario in parsed_feature.scenarios:
        if parsed_feature.tags:
            scenario.tags.extend(parsed_feature.tags)
        self.file_contents.append(
            (scenario, precondition, objective, file_path, folder_name, link))
def run_with_paths(self):
    """Execute every feature file selected by the configuration.

    Excluded files are skipped, empty feature files are ignored, each
    feature is run through its formatter and reporters, and execution
    stops early when a feature fails and ``stop`` is configured.

    :return: True when at least one feature failed.
    """
    self.load_hooks()
    self.load_step_definitions()
    context = self.context = Context(self)
    # -- ENSURE: context.execute_steps() works in weird cases (hooks, ...)
    self.setup_capture()
    out_stream = self.config.output
    failures = 0
    run_failed = False
    self.run_hook('before_all', context)
    for path in self.feature_files():
        if self.config.exclude(path):
            continue
        current = parser.parse_file(os.path.abspath(path),
                                    language=self.config.lang)
        if not current:
            # -- CORNER-CASE: Feature file without any feature(s).
            continue
        self.features.append(current)
        self.feature = current
        self.formatter = formatters.get_formatter(self.config, out_stream)
        self.formatter.uri(path)
        run_failed = current.run(self)
        if run_failed:
            failures += 1
        self.formatter.close()
        for reporter in self.config.reporters:
            reporter.feature(current)
        if run_failed and self.config.stop:
            break
    self.run_hook('after_all', context)
    for reporter in self.config.reporters:
        reporter.end()
    return failures > 0
def parse_features(self, feature_files):
    """
    Parse feature files and return list of Feature model objects.

    :param feature_files: List of feature files to parse.
    :return: List of feature objects.
    """
    parsed = []
    for name in feature_files:
        if self.config.exclude(name):
            continue
        feature = parser.parse_file(os.path.abspath(name),
                                    language=self.config.lang)
        if feature:
            parsed.append(feature)
        # else: CORNER-CASE -- feature file without any feature(s); skip.
    return parsed
def parse_features(feature_files, language=None):
    """
    Parse feature files and return list of Feature model objects.
    Handles:

      * feature file names, ala "alice.feature"
      * feature file locations, ala: "alice.feature:10"

    :param feature_files: List of feature file names to parse.
    :param language: Default language to use.
    :return: List of feature objects.
    """
    scenario_collector = FeatureScenarioLocationCollector()
    features = []
    for location in feature_files:
        # Normalize plain strings into FileLocation objects.
        if not isinstance(location, FileLocation):
            assert isinstance(location, string_types)
            location = FileLocation(os.path.normpath(location))
        if location.filename == scenario_collector.filename:
            # Same file as the feature currently being collected:
            # just record the additional scenario location.
            scenario_collector.add_location(location)
            continue
        elif scenario_collector.feature:
            # -- ADD CURRENT FEATURE: As collection of scenarios.
            # A different file begins: flush the collected feature first.
            current_feature = scenario_collector.build_feature()
            features.append(current_feature)
            scenario_collector.clear()
        # -- NEW FEATURE:
        assert isinstance(location, FileLocation)
        filename = os.path.abspath(location.filename)
        feature = parser.parse_file(filename, language=language)
        if feature:
            # -- VALID FEATURE:
            # SKIP CORNER-CASE: Feature file without any feature(s).
            scenario_collector.feature = feature
            scenario_collector.add_location(location)
    # -- FINALLY: flush the last collected feature, if any.
    if scenario_collector.feature:
        current_feature = scenario_collector.build_feature()
        features.append(current_feature)
    return features
def run_with_paths(self):
    """Run all selected feature files.

    Excluded files are skipped; each feature is run through its formatter
    and reporters; execution stops after the first failure when ``stop``
    is configured.

    :return: True when at least one feature failed.
    """
    self.load_hooks()
    self.load_step_definitions()
    context = self.context = Context(self)
    stream = self.config.output
    failed = False
    self.run_hook('before_all', context)
    for filename in self.feature_files():
        if self.config.exclude(filename):
            continue
        feature = parser.parse_file(os.path.abspath(filename),
                                    language=self.config.lang)
        if not feature:
            # -- CORNER-CASE: feature file without any feature(s).
            # Guard added for consistency with the sibling runner/parser
            # helpers; previously a falsy parse result would be appended
            # and then crash on feature.run().
            continue
        self.features.append(feature)
        self.feature = feature
        self.formatter = formatters.get_formatter(self.config, stream)
        self.formatter.uri(filename)
        failed |= feature.run(self)
        self.formatter.close()
        stream.write('\n')
        # Plain loops instead of side-effect list comprehensions (idiom).
        for reporter in self.config.reporters:
            reporter.feature(feature)
        if failed and self.config.stop:
            break
    self.run_hook('after_all', context)
    for reporter in self.config.reporters:
        reporter.end()
    return failed
def parse_feature_file(file_path: str, configs: dict) -> list:
    """Parse one .feature file into a list of TM4J testcase dicts.

    :param file_path: path of the .feature file to parse.
    :param configs: parsed configuration with 'BDD' and 'GENERAL' sections.
    :return: list of json-serializable testcase dictionaries.
    :raises LookupError: when the file contains no scenarios.
    """
    result = []
    tp = TestParameters()
    feature = pr.parse_file(file_path)
    feature_link = f'{configs["BDD"]["repoLink"]}/{path.relpath(file_path, configs["BDD"]["featuresFolder"])}'\
        .replace('\\', '/')
    if len(feature.scenarios) == 0:
        raise LookupError('no scenarios found in .feature file')
    for scenario in feature.scenarios:
        parsed_tags = parse_scenario_tags(scenario.tags)
        # Build the testcase name without mutating scenario.description in
        # place (the previous insert(0, ...) altered the parsed model).
        name = "".join([scenario.name] + list(scenario.description))
        testcase = {
            "projectKey": configs['GENERAL']['tm4jProjectKey'],
            "name": name,
            "precondition": "The precondition.",
            "objective": name,
            "folder": f"/{configs['GENERAL']['tcFolder']}",
            "status": "Approved",
            "priority": parsed_tags['priority'],
            "labels": parsed_tags['tags'] + feature.tags,
            "link": feature_link
        }
        if configs['BDD']['parseJiraTags'] == 'True':
            testcase.update({"issueLinks": [parsed_tags['jira']]})
        testcase.update({"testScript": parse_steps_to_script(scenario.steps)})
        # Bug fix: the old check indexed scenario.examples[0] before testing
        # emptiness, raising IndexError for scenarios with an empty examples
        # list. Guard on the list itself first.
        examples = getattr(scenario, 'examples', None)
        if examples and hasattr(examples[0], 'table'):
            table = examples[0].table
            tp.set_variables(table.headings)
            for row in table.rows:
                tp.append_values(row.headings, row.cells)
            logger.debug(f'TP: {tp.parameters}')
            testcase.update({'parameters': tp.parameters})
        result.append(testcase)
    return result
def run_with_paths(self):
    """Run all selected feature files; return True when any failed.

    Bug fix: ``failed`` was reassigned on each iteration, so without
    ``stop`` configured the return value reflected only the LAST feature's
    result. It now accumulates across features with ``|=``, matching the
    sibling runner implementations.
    """
    self.load_hooks()
    self.load_step_definitions()
    context = self.context = Context(self)
    stream = self.config.output
    failed = False
    self.run_hook('before_all', context)
    for filename in self.feature_files():
        if self.config.exclude(filename):
            continue
        feature = parser.parse_file(os.path.abspath(filename),
                                    language=self.config.lang)
        self.features.append(feature)
        self.feature = feature
        self.formatter = formatters.get_formatter(self.config, stream)
        self.formatter.uri(filename)
        failed |= feature.run(self)
        self.formatter.close()
        stream.write('\n')
        # Plain loops instead of side-effect list comprehensions (idiom).
        for reporter in self.config.reporters:
            reporter.feature(feature)
        if failed and self.config.stop:
            break
    self.run_hook('after_all', context)
    for reporter in self.config.reporters:
        reporter.end()
    return failed
def test_every_feature_test_updated_with_keys(self):
    """Export succeeds with the expected counters, and the posted testcase
    key/name pair is present among the pairs parsed from its feature file.
    """
    expected_export_results = {
        'Files read': 3,
        'Results found': 1,
        'Exported': 1,
        'Failed': 0
    }
    # Replace the TM4J connector with the mock so no network call happens.
    with patch('classes.tmconnect.TM4J', new=mocked_tm4j) as patched_tm4j:
        self.bdd_parser.tm = patched_tm4j
        self.bdd_parser.do_export_results()
        self.assertDictEqual(self.bdd_parser.export_results,
                             expected_export_results)
        posted_testcase = self.bdd_parser.parse_results[0]
        feature = pr.parse_file(posted_testcase['feature_file_path'])
        feature_tests = dict()
        posted_test = {'CST-T1': posted_testcase['name']}
        # NOTE(review): iterating the behave feature object directly —
        # assumed to yield its scenarios; confirm against behave's model.
        for scenario in feature:
            # description is a list; the repr of an empty list is stripped
            scenario_name = f'{str(scenario.name)}{str(scenario.description)}'.replace(
                '[]', '')
            n_key, n_name = split_testcase_name_key(
                scenario_name,
                self.parseconfig['GENERAL']['testCaseKeyDelimiter'])
            feature_tests.update({n_key: n_name})
        # Every posted (key, name) pair must appear among the parsed ones.
        self.assertTrue(feature_tests.items() >= posted_test.items())
def collect(self):
    """Parse this file with behave and yield one Feature collection node."""
    # Deferred import, kept local as in the surrounding collection hooks.
    from behave.parser import parse_file
    parsed = parse_file(self.fspath.strpath, language=None)
    yield Feature(parsed, self)
def create_application_documentation(self, report_file=None, output_file_name="demo.docx"):
    """
    Create a document (docx) object and read first all ".feature" files and add
    their contents into the document.

    If a report file name is provided, it will add a "last execution" section
    containing the data found in the report. The report file must be the
    "plain" report output file generated from behave.

    :param report_file: The report file path (absolute or relative)
    :param output_file_name: The exported file name, by default "demo.docx"
    :return: None
    """
    log.info("Start application documentation")
    self.__document = Document()
    self.document.add_heading("{}".format(self.__report_title), 0)  # Document title
    self.document.add_page_break()
    # Is copying file needed? On Windows, when the feature repository lives on
    # a different drive than the current working directory, each feature file
    # is copied next to the execution directory before being parsed —
    # presumably to work around cross-drive path handling; TODO confirm.
    if platform.system() == "Windows":
        current_execution = os.path.abspath(os.getcwd())
        repository = os.path.abspath(self.__feature_repository)
        # Same drive letter -> no copy needed
        if current_execution.split(os.path.sep)[0] == repository.split(os.path.sep)[0]:
            please_copy = False
        else:
            please_copy = True
            temp_file = os.path.abspath(f"{current_execution}/temp.feature")
    else:
        please_copy = False
    log.info(f"We are on {platform.system()} and we need to copy is set to {please_copy}")
    # Insert forewords sections (markdown files, alphabetical order)
    if self.forewords_folder is not None:
        self.document.add_heading("Forewords", 1)
        list_of_files = sorted(filter(os.path.isfile, glob.glob(f"{self.forewords_folder}/*.md")))
        for file in list_of_files:
            with open(file) as foreword_section:
                content = foreword_section.read()
                # Shift every markdown heading one level down (+1 '#').
                # Bug fix: the previous pattern r'^(#*)' without re.MULTILINE
                # matched only the (possibly empty) start of the whole string,
                # so it shifted at most the first line's heading and prepended
                # '#' even to a non-heading first line. '#+' with re.MULTILINE
                # shifts actual headings on every line, and only headings.
                content = re.sub(r'^(#+)', r'#\1', content, flags=re.MULTILINE)
                # Process included picture with relative path
                content = re.sub(r'!\[([^\[\]]+)\]\(([^\s]+)\)', self.__forewords_picture, content)
                # Process inline puml diagrams
                content = re.sub(r'```puml[\r|\n]{1,2}([^`]*)```', self.__forewords_inline_puml, content, flags=re.MULTILINE)
                # Process workflow diagram references
                content = re.sub(r'!!Workflow:\s*([\.\d\w\-\_\\\/]*)\s*', self.__forewords_schema_replacement, content)
                insert_text(self.document, content)
            self.document.add_page_break()
    if report_file is not None or self.__include_result:
        self.__include_result = True
        self.document.add_heading("Living documentation", 1)
    # Use the iterator as it's cleaner on large repositories
    for file in glob.iglob("{}/**/*.feature".format(self.__feature_repository), recursive=True):
        log.info(f"Computing {os.path.abspath(file)}")
        # If the file's drive is not the current one, copy it and use the copy
        if please_copy:
            copyfile(os.path.abspath(file), temp_file)
        else:
            temp_file = os.path.abspath(file)
        try:
            # Use the Behave parser in order to read the feature file
            test = parse_file(temp_file)
            self.add_heading(feature=test)
            self.add_description(feature=test)
            self.add_background(feature=test)
            self.add_scenario(feature=test)
            self.document.add_page_break()
            # Remove the file copy once parsed
            if please_copy:
                os.remove(temp_file)
        except Exception as exception:
            # Best effort: a broken feature file must not abort the export
            log.error(exception)
    if report_file is not None:
        self.add_report(file=report_file)
    # Always export with a .docx extension
    if not output_file_name.endswith(".docx"):
        self.document.save(f"{output_file_name}.docx")
    else:
        self.document.save(output_file_name)
    log.info("Processing done.")