def test_parse_multiple_features():
    parser = Parser()
    ff1 = parser.parse(TokenScanner("Feature: 1"))
    ff2 = parser.parse(TokenScanner("Feature: 2"))
    assert "1" == ff1['feature']['name']
    assert "2" == ff2['feature']['name']

def parse(cls, string=None, filename=None, language=None):
    """
    Parse either a string or a file.
    """
    parser = Parser()
    if language:
        if language == 'pt-br':
            language = 'pt'
        token_matcher = LanguageTokenMatcher(language)
    else:
        token_matcher = TokenMatcher()
    if string:
        token_scanner = TokenScanner(string=string)
    else:
        token_scanner = TokenScanner(filename=filename)
    try:
        return cls(
            parser.parse(token_scanner, token_matcher=token_matcher),
            filename=filename,
        )
    except ParserError as ex:
        raise AloeSyntaxError(filename, str(ex))

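# A minimal usage sketch of the language handling above, using plain
# gherkin-official calls. The imports mirror the ones the snippets in this
# file rely on; the feature text is illustrative, not taken from the
# original code.
from gherkin.parser import Parser
from gherkin.token_matcher import TokenMatcher
from gherkin.token_scanner import TokenScanner


def demo_parse_portuguese():
    # Mirrors the pt-br -> pt normalization above: 'pt' selects the
    # Portuguese dialect, so "Funcionalidade" is matched as "Feature".
    matcher = TokenMatcher('pt')
    document = Parser().parse(TokenScanner("Funcionalidade: Login"), matcher)
    return document['feature']['name']  # -> 'Login'
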
def test_parse_multiple_features():
    parser = Parser()
    ff1 = parser.parse(TokenScanner("Feature: 1"))
    ff2 = parser.parse(TokenScanner("Feature: 2"))
    assert_equals("1", ff1['feature']['name'])
    assert_equals("2", ff2['feature']['name'])

def create_feature_file_for_gherkins(feature, gherkins):
    feature_file = Base.feature_bare.substitute(component=feature)
    for gherkin in gherkins:
        feature_file += gherkin
    # Parse to validate the assembled feature file; the result is discarded.
    Parser().parse(TokenScanner(feature_file))
    return feature_file

def create_gherkins_from_threats(threats):
    scenarios = Scenarios.stride
    gherkins = list()
    for threat_class in threats:
        for threat in threats[threat_class]:
            threat_gherkin = scenarios[threat_class].substitute(
                process=threat['process'],
                source=threat['source'],
                sourceZone=threat['sourceZone'],
                destination=threat['destination'],
                destinationZone=threat['destinationZone'])
            parser = Parser()
            feature_base = Base.feature_base
            try:
                parser.parse(
                    TokenScanner(
                        feature_base.substitute(component="None",
                                                scenario=threat_gherkin)))
            except CompositeParserException:
                print("Invalid gherkin template created: {}".format(
                    threat_gherkin))
            else:
                gherkins.append(threat_gherkin)
    return gherkins

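# A condensed sketch of the validate-before-keep pattern used above: wrap
# the candidate scenario in a throwaway feature and keep it only if the
# parser accepts it. Assumes gherkin-official's Parser, TokenScanner and
# CompositeParserException; the probe feature text is illustrative.
from gherkin.errors import CompositeParserException
from gherkin.parser import Parser
from gherkin.token_scanner import TokenScanner


def is_valid_gherkin_scenario(scenario_text):
    probe = "Feature: probe\n" + scenario_text
    try:
        Parser().parse(TokenScanner(probe))
    except CompositeParserException:
        return False
    return True
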
def test_compiles_a_scenario_outline_with_i18n_characters():
    feature_text = textwrap.dedent("""\
        Feature: f
          Scenario Outline: with 'é' in title
            Given <with-é>
            Examples:
            | with-é  |
            | passing |
        """)
    id_generator = IdGenerator()
    gherkin_document = Parser(AstBuilder(id_generator)).parse(feature_text)
    gherkin_document['uri'] = 'uri'
    pickle = Compiler(id_generator).compile(gherkin_document)
    expected_pickle = textwrap.dedent("""\
        [
          {
            "id": "6",
            "astNodeIds": ["4", "2"],
            "name": "with 'é' in title",
            "language": "en",
            "steps": [
              {
                "id": "5",
                "astNodeIds": ["0", "2"],
                "text": "passing"
              }
            ],
            "tags": [],
            "uri": "uri"
          }
        ]
        """)
    assert_equals(pickle, json.loads(expected_pickle))

def read_and_handle(filename, display_filename=None):
    parser = Parser()
    h = Handler(filename=display_filename if display_filename else filename)
    with open(filename, 'r') as fp:
        h.handle(parser.parse(fp.read()))
    return h.scenarios

def test_compiles_a_scenario():
    feature_text = textwrap.dedent(
        """\
        Feature: f
          Scenario: s
            Given passing
        """)
    id_generator = IdGenerator()
    gherkin_document = Parser(AstBuilder(id_generator)).parse(feature_text)
    gherkin_document['uri'] = 'uri'
    pickle = Compiler(id_generator).compile(gherkin_document)
    expected_pickle = textwrap.dedent(
        """\
        [
          {
            "id": "3",
            "astNodeIds": ["1"],
            "name": "s",
            "language": "en",
            "steps": [
              {
                "id": "2",
                "astNodeIds": ["0"],
                "type": "Context",
                "text": "passing"
              }
            ],
            "tags": [],
            "uri": "uri"
          }
        ]
        """)
    assert pickle == json.loads(expected_pickle)

def test_parse_feature_after_parser_error():
    parser = Parser()
    with assert_raises(ParserError):
        parser.parse(TokenScanner('# a comment\n' +
                                  'Feature: Foo\n' +
                                  '  Scenario: Bar\n' +
                                  '    Given x\n' +
                                  '      ```\n' +
                                  '      unclosed docstring\n'))
    feature_file = parser.parse(TokenScanner('Feature: Foo\n' +
                                             '  Scenario: Bar\n' +
                                             '    Given x\n'
                                             '      """\n'
                                             '      closed docstring\n'
                                             '      """\n'))
    expected = [{'scenario': {
        'id': '1',
        'name': u'Bar',
        'description': '',
        'keyword': u'Scenario',
        'tags': [],
        'steps': [{
            'id': '0',
            'text': u'x',
            'location': {'column': 5, 'line': 3},
            'keyword': u'Given ',
            'docString': {
                'content': u'closed docstring',
                'delimiter': '"""',
                'location': {'column': 7, 'line': 4}}}],
        'location': {'column': 3, 'line': 2},
        'examples': []}}]
    assert_equals(expected, feature_file['feature']['children'])

def get_feature(file_path: str):
    """Read and parse the given feature file."""
    print('Reading feature file ', file_path)
    with open(file_path, "r") as file_obj:
        stream = file_obj.read()
    parser = Parser()
    return parser.parse(TokenScanner(stream))

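# Hedged example of consuming the AST returned by get_feature above. The
# path is hypothetical; the dict shape ('feature' -> 'name'/'language')
# matches the test_parser snippet later in this file.
def describe_feature(file_path: str) -> str:
    document = get_feature(file_path)
    feature = document['feature']
    return "{} ({})".format(feature['name'], feature['language'])
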
def main(resources_path):
    parser = Parser()
    nlp_ready_resources = {}
    for root, dirs, files in os.walk(resources_path):
        for file_name in files:
            if file_name.endswith('.resource'):
                resource = os.path.splitext(basename(file_name))[0]
                parsed_resource_file = parser.parse(
                    os.path.join(root, file_name))
                nlp_ready_resources[resource] = {}
                for child in parsed_resource_file['feature']['children']:
                    if child['type'] == 'Background':
                        nlp_ready_resources[resource]['background'] = {}
                        nlp_ready_resources[resource]['background']['Given'] = []
                        for step in child['steps']:
                            sentence = step['keyword'] + step['text']
                            nlp_ready_resources[resource]['background'][
                                'Given'].append({'sentence': sentence})
                    elif child['type'] == 'Scenario':
                        ordered_step_types = OrderedDict({
                            'Given': [],
                            'When': [],
                            'Then': []
                        })
                        ordered_step_types.move_to_end('When')
                        ordered_step_types.move_to_end('Then')
                        nlp_ready_resources[resource][
                            child['name']] = ordered_step_types
                        in_step = ''
                        for step in child['steps']:
                            data_table = []
                            sentence = step['keyword'] + step['text']
                            # Note: keywords carry a trailing space
                            # ('Given ', 'When ', 'Then ').
                            if step['keyword'] in ('Given ', 'When ', 'Then '):
                                in_step = step['keyword'].strip()
                            if 'argument' in step:
                                if step['argument']['type'] == 'DataTable':
                                    data_table = parse_table(step)
                            if in_step != 'Given':
                                nlp_ready_resources[resource][
                                    child['name']][in_step].append({
                                        'sentence': sentence,
                                        'data_table': data_table
                                    })
                            else:
                                nlp_ready_resources[resource][
                                    child['name']][in_step].append(
                                        {'sentence': sentence})
    return nlp_ready_resources

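# Illustrative shape of the structure main() builds, inferred from the code
# above (an assumption, not output from a real run):
#
# {
#     'login': {
#         'background': {'Given': [{'sentence': 'Given a user'}]},
#         'Successful login': OrderedDict([
#             ('Given', [{'sentence': 'Given a registered user'}]),
#             ('When', [{'sentence': 'When they sign in',
#                        'data_table': []}]),
#             ('Then', [{'sentence': 'Then they see the dashboard',
#                        'data_table': []}]),
#         ]),
#     },
# }
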
def get_scenario(feature_path, line):
    with open(feature_path) as fp:
        parser = Parser()
        print(feature_path)
        feature_file = parser.parse(TokenScanner(fp.read()))
    scenarios = get_scenarios(feature_file['feature']['children'])
    for each in scenarios:
        if each.line == line:
            return each
    return None

def parse_one(filename):
    try:
        with open(filename, 'r') as fp:
            text = fp.read()
        try:
            parser = Parser()
            doc = parser.parse(TokenScanner(text))
            return str(type(doc)), None
        except Exception as err:
            return None, ExceptionWrapper.wrap(f'Parse: {filename}', err)
    except Exception as err:
        return None, ExceptionWrapper.wrap(f'Open: {filename}', err)

def get_feature(file_path: str):
    """Read and parse the given feature file."""
    try:
        with open(file_path, "r", encoding='utf8') as file_obj:
            stream = file_obj.read()
        parser = Parser()
        response = parser.parse(TokenScanner(stream),
                                token_matcher=TokenMatcher('pt'))
    except Exception as error:
        raise Exception('Error reading feature file, verify the file: ' +
                        file_path) from error
    return response

def __init__(self, methods, webdriver):
    self.features = []
    self.queue_list = []
    self.methods = methods
    self.parser = Parser()
    self.webdriver = webdriver
    self.passed_steps = 0
    self.passed_scenarios = 0
    self.failed_steps = 0
    self.failed_scenarios = 0
    self.wip_tag_flag = False
    self.first_scenario_flag = True
    self.log = ''

def __init__(self, file=None, raw_text=None):
    self.raw_text = raw_text
    self.file = file
    parser = Parser()
    scanner = TokenScanner(self.file)
    try:
        self.gherkin_document = parser.parse(scanner)
        self.pickles = compiler.compile(self.gherkin_document)
        if len(self.pickles) < 1:
            raise GherkinError("no pickles found!")
    except Exception as e:
        raise GherkinError("unable to parse / pickle doc {doc}".format(
            doc=self.file)) from e

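# A standalone sketch of the parse-then-pickle flow the class above wraps,
# using the newer gherkin-official API seen in the compiler tests in this
# file (IdGenerator / AstBuilder / Compiler). The import paths are assumed
# from gherkin-official and may differ across versions; the feature text is
# illustrative.
from gherkin.ast_builder import AstBuilder
from gherkin.parser import Parser
from gherkin.pickles.compiler import Compiler
from gherkin.stream.id_generator import IdGenerator


def compile_pickles(feature_text, uri='inline.feature'):
    id_generator = IdGenerator()
    document = Parser(AstBuilder(id_generator)).parse(feature_text)
    document['uri'] = uri
    return Compiler(id_generator).compile(document)
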
def test_compiles_a_scenario_outline_with_i18n_characters():
    feature_text = textwrap.dedent("""\
        Feature: f
          Scenario Outline: with 'é' in title
            Given <with-é>
            Examples:
            | with-é  |
            | passing |
        """)
    output = Parser().parse(feature_text)
    pickle = compiler.compile(output, 'features/hello.feature')
    expected_pickle = textwrap.dedent("""\
        [
          {
            "name": "Scenario: with 'é' in title",
            "steps": [
              {
                "text": "passing",
                "arguments": [],
                "locations": [
                  {"line": 6, "column": 5, "path": "features/hello.feature"},
                  {"line": 3, "column": 11, "path": "features/hello.feature"}
                ]
              }
            ],
            "tags": [],
            "locations": [
              {"line": 6, "column": 5, "path": "features/hello.feature"},
              {"line": 2, "column": 3, "path": "features/hello.feature"}
            ]
          }
        ]
        """)
    assert_equals(pickle, json.loads(expected_pickle))

def feature(path):
    try:
        with open(os.path.join(BASE_DIR, path), "r") as file:
            data = file.read()
    except FileNotFoundError:
        return "Not found"
    parser = Parser()
    parsed_data = parser.parse(data)
    parsed_data["feature"]["description"] = mistune.markdown(
        parsed_data["feature"]["description"])
    new_feature_children = copy.deepcopy(parsed_data["feature"]["children"])
    for part_i, part in enumerate(parsed_data["feature"]["children"]):
        if "examples" in part:
            for table_i, table in enumerate(part["examples"]):
                for row_i, row in enumerate(table["tableBody"]):
                    endpoint = ""
                    for cell_i, cell in enumerate(row["cells"]):
                        json_name = ""
                        if cell["value"].startswith("/"):
                            endpoint = cell["value"].replace("/", "")
                        if cell["value"].endswith(".json"):
                            json_name = cell["value"]
                        if endpoint and json_name:
                            try:
                                # Remove the .feature part of the path (the
                                # last segment) and try to get the JSON there.
                                with open(os.path.join(
                                        BASE_DIR,
                                        "/".join(path.split("/")[:-1]),
                                        json_name), "r") as file:
                                    data = file.read()
                                new_feature_children[part_i]["examples"][
                                    table_i]["tableBody"][row_i]["cells"][
                                        cell_i]["json"] = data
                            except FileNotFoundError:
                                pass
    parsed_data["feature"]["children"] = new_feature_children
    return render_template('feature.html', data=parsed_data)

def ast_from_gherkin_file(fd: TextIO) -> dict:
    """Parse a file with a gherkin document into an Abstract Syntax Tree.

    Parameters
    ----------
    fd : file-like object
        File descriptor of a file containing a gherkin document.

    Returns
    -------
    ast : dict
        A dictionary representation of the gherkin file, as returned by
        gherkin.parser.Parser.parse.
    """
    fd.seek(0)
    return Parser().parse(TokenScanner(fd.read()))

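# Usage sketch for ast_from_gherkin_file: any seekable text file-like
# object works, including an in-memory StringIO (the feature text here is
# illustrative).
import io


def demo_ast_from_string():
    fd = io.StringIO("Feature: Foo\n  Scenario: Bar\n    Given x\n")
    ast = ast_from_gherkin_file(fd)
    return ast['feature']['name']  # -> 'Foo'
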
def test_parser():
    parser = Parser()
    feature_file = parser.parse(TokenScanner("Feature: Foo"))
    expected = {
        'comments': [],
        'feature': {
            'keyword': u'Feature',
            'language': 'en',
            'location': {'column': 1, 'line': 1},
            'name': u'Foo',
            'description': '',
            'children': [],
            'tags': []
        },
    }
    assert_equals(expected, feature_file)

def __init__(self, file=None, path=None, **defaults):
    if file is None:
        file = self
    if path is None:
        path = file.path
    super().__init__(path=path, **defaults)
    self.file = file
    self.path = file.path
    parser = Parser()
    scanner = TokenScanner(self.path)
    try:
        self.gherkin_document = parser.parse(scanner)
        self.pickles = compiler.compile(self.gherkin_document)
    except Exception as e:
        raise GherkinError("unable to parse / pickle doc {doc}".format(
            doc=self.path)) from e

def parse(content: str) -> GherkinDocument:
    """
    Parse the content of a file to an AST.
    """
    parser = Parser()
    try:
        parse_result = parser.parse(content)
    except ParserError as e:
        raise InvalidInput(e) from e

    try:
        result = converter.structure(parse_result, GherkinDocument)
    except Exception as e:
        raise DeserializeError(f"{type(e).__name__}: {e}") from e

    return result

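# Hedged example of calling parse above. InvalidInput, DeserializeError and
# GherkinDocument come from the surrounding project (a cattrs-style
# converter.structure), not from gherkin-official; the handling here is
# illustrative.
def try_parse(content: str):
    try:
        return parse(content)
    except InvalidInput:
        return None  # malformed gherkin
    except DeserializeError:
        return None  # parsed, but did not fit the GherkinDocument model
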
def collect(self):
    parser = Parser()
    with self.fspath.open() as handle:
        feature = parser.parse(handle.read())

    # Group the feature's children by type
    children = defaultdict(list)
    for child in feature["feature"].get("children", []):
        children[child["type"]].append(child)
    backgrounds = children.get("Background", [])

    self.obj = dict()
    for scenario_index, scenario_outline in enumerate(
            children["ScenarioOutline"]):
        for example in self._get_example_sets(scenario_outline["examples"]):
            example_values = "-".join(
                [v for d in example for v in d.values()])
            function = ScenarioOutline(
                name=scenario_outline["name"] + ": " + example_values,
                parent=self,
                spec=scenario_outline,
                scenario_index=scenario_index,
                example=example,
                backgrounds=backgrounds,
            )
            for mark in MARKS:
                function = getattr(pytest.mark, mark)(function)
            yield function

    for scenario_index, scenario_outline in enumerate(
            children["Scenario"], -1000000):
        function = ScenarioOutline(
            name=scenario_outline["name"],
            parent=self,
            spec=scenario_outline,
            scenario_index=scenario_index,
            backgrounds=backgrounds,
        )
        for mark in MARKS:
            function = getattr(pytest.mark, mark)(function)
        yield function

def test_change_the_default_language():
    parser = Parser()
    matcher = TokenMatcher('no')
    feature_file = parser.parse(TokenScanner("Egenskap: i18n support - åæø"),
                                matcher)
    expected = {
        'comments': [],
        'feature': {
            'keyword': u'Egenskap',
            'language': 'no',
            'location': {'column': 1, 'line': 1},
            'name': u'i18n support - åæø',
            'description': '',
            'children': [],
            'tags': []
        },
    }
    assert_equals(expected, feature_file)

def test_parser():
    parser = Parser()
    feature = parser.parse(TokenScanner("Feature: Foo"))
    expected = {
        'comments': [],
        'keyword': u'Feature',
        'language': 'en',
        'location': {'column': 1, 'line': 1},
        'name': u'Foo',
        'scenarioDefinitions': [],
        'tags': [],
        'type': 'Feature'
    }
    assert_equals(expected, feature)

def test_parse_feature_after_parser_error():
    parser = Parser()
    with assert_raises(ParserError):
        parser.parse(
            TokenScanner('# a comment\n' +
                         'Feature: Foo\n' +
                         '  Scenario: Bar\n' +
                         '    Given x\n' +
                         '      ```\n' +
                         '      unclosed docstring\n'))
    feature = parser.parse(
        TokenScanner('Feature: Foo\n' +
                     '  Scenario: Bar\n' +
                     '    Given x\n'
                     '      """\n'
                     '      closed docstring\n'
                     '      """\n'))
    expected = [{
        'name': u'Bar',
        'keyword': u'Scenario',
        'tags': [],
        'steps': [{
            'text': u'x',
            'type': 'Step',
            'location': {'column': 5, 'line': 3},
            'keyword': u'Given ',
            'argument': {
                'content': u'closed docstring',
                'type': 'DocString',
                'location': {'column': 7, 'line': 4}
            }
        }],
        'location': {'column': 3, 'line': 2},
        'type': 'Scenario'
    }]
    assert_equals(expected, feature['scenarioDefinitions'])

def parse(cls, string=None, filename=None, language=None):
    """
    Parse either a string or a file.
    """
    parser = Parser()
    if language:
        if language == "pt-br":
            language = "pt"
        token_matcher = LanguageTokenMatcher(language)
    else:
        token_matcher = TokenMatcher()
    try:
        return cls(parser.parse(string or filename,
                                token_matcher=token_matcher),
                   filename=filename)
    except ParserError as ex:
        raise AloeSyntaxError(filename, str(ex))

def test_change_the_default_language():
    parser = Parser()
    matcher = TokenMatcher('no')
    feature = parser.parse(TokenScanner("Egenskap: i18n support - åæø"),
                           matcher)
    expected = {
        'comments': [],
        'keyword': u'Egenskap',
        'language': 'no',
        'location': {'column': 1, 'line': 1},
        'name': u'i18n support - åæø',
        'scenarioDefinitions': [],
        'tags': [],
        'type': 'Feature'
    }
    assert_equals(expected, feature)

def read_feature(feature_path):
    """
    Read a specific feature.

    :param feature_path: path of the file that contains the feature
    :return: Feature object

    TODO: refactor read_all_bdds() to call this method inside its for loop.
    """
    feature = Feature()
    with open(feature_path) as fp:
        print(feature_path)
        parser = Parser()
        feature_file = parser.parse(TokenScanner(fp.read()))
    feature.feature_name = feature_file['feature']['name']
    feature.language = feature_file['feature']['language']
    feature.path_name = feature_path
    feature.tags = feature_file['feature']['tags']
    feature.line = feature_file['feature']['location']['line']
    feature.scenarios = get_scenarios(feature_file['feature']['children'])
    return feature

def test_compiles_a_scenario():
    feature_text = textwrap.dedent("""\
        Feature: f
          Scenario: s
            Given passing
        """)
    output = Parser().parse(feature_text)
    pickle = compiler.compile(output, 'features/hello.feature')
    expected_pickle = textwrap.dedent("""\
        [
          {
            "name": "Scenario: s",
            "steps": [
              {
                "text": "passing",
                "arguments": [],
                "locations": [
                  {"line": 3, "column": 11, "path": "features/hello.feature"}
                ]
              }
            ],
            "tags": [],
            "locations": [
              {"line": 2, "column": 3, "path": "features/hello.feature"}
            ]
          }
        ]
        """)
    assert_equals(pickle, json.loads(expected_pickle))