def test_parse_feature_after_parser_error():
    """After a ParserError the same Parser instance must be reusable."""
    parser = Parser()
    # The ``` doc string below is never closed, so this parse must fail.
    with assert_raises(ParserError):
        parser.parse(TokenScanner('# a comment\n' +
                                  'Feature: Foo\n' +
                                  '  Scenario: Bar\n' +
                                  '    Given x\n' +
                                  '      ```\n' +
                                  '      unclosed docstring\n'))
    # A well-formed document parsed afterwards must succeed normally.
    feature_file = parser.parse(TokenScanner('Feature: Foo\n' +
                                             '  Scenario: Bar\n' +
                                             '    Given x\n'
                                             '      """\n'
                                             '      closed docstring\n'
                                             '      """\n'))
    # NOTE(review): the in-string indentation was reconstructed from the
    # expected location columns below (Scenario col 3, Given col 5,
    # doc string col 7) -- confirm against the original file.
    expected = [{'scenario': {
        'id': '1',
        'name': u'Bar',
        'description': '',
        'keyword': u'Scenario',
        'tags': [],
        'steps': [{
            'id': '0',
            'text': u'x',
            'location': {'column': 5, 'line': 3},
            'keyword': u'Given ',
            'docString': {
                'content': u'closed docstring',
                'delimiter': '"""',
                'location': {'column': 7, 'line': 4}}}],
        'location': {'column': 3, 'line': 2},
        'examples': []}}]
    assert_equals(expected, feature_file['feature']['children'])
def parse(cls, string=None, filename=None, language=None):
    """Parse a feature from a string or from a file on disk."""
    parser = Parser()
    # pylint:disable=redefined-variable-type
    # https://bitbucket.org/logilab/pylint/issues/710
    if not language:
        token_matcher = TokenMatcher()
    else:
        if language == 'pt-br':
            language = 'pt'
        token_matcher = LanguageTokenMatcher(language)
    # pylint:enable=redefined-variable-type
    token_scanner = (TokenScanner(string=string) if string
                     else TokenScanner(filename=filename))
    try:
        return cls(
            parser.parse(token_scanner, token_matcher=token_matcher),
            filename=filename,
        )
    except ParserError as ex:
        raise AloeSyntaxError(filename, str(ex))
def get_feature(file_path: str):
    """Read and parse the given feature file.

    :param file_path: path to a ``.feature`` file
    :return: the parsed gherkin document
    :raises OSError: if the file cannot be read
    """
    print('Reading feature file ', file_path)
    # Use a context manager so the handle is always closed -- the
    # original opened the file and never closed it.
    with open(file_path, "r") as file_obj:
        stream = file_obj.read()
    parser = Parser()
    return parser.parse(TokenScanner(stream))
def test_parse_multiple_features():
    """One Parser instance can parse several documents in a row."""
    parser = Parser()
    first = parser.parse(TokenScanner("Feature: 1"))
    second = parser.parse(TokenScanner("Feature: 2"))
    assert "1" == first['feature']['name']
    assert "2" == second['feature']['name']
def parse(cls, string=None, filename=None, language=None):
    """Parse a feature from a string or from a file."""
    parser = Parser()
    if not language:
        token_matcher = TokenMatcher()
    else:
        if language == 'pt-br':
            language = 'pt'
        token_matcher = LanguageTokenMatcher(language)
    token_scanner = (
        TokenScanner(string=string)
        if string
        else TokenScanner(filename=filename)
    )
    try:
        return cls(
            parser.parse(token_scanner, token_matcher=token_matcher),
            filename=filename,
        )
    except ParserError as ex:
        raise AloeSyntaxError(filename, str(ex))
def test_parse_multiple_features():
    """A single Parser can be reused for several documents (legacy AST)."""
    parser = Parser()
    first = parser.parse(TokenScanner("Feature: 1"))
    second = parser.parse(TokenScanner("Feature: 2"))
    assert_equals("1", first['name'])
    assert_equals("2", second['name'])
def test_parse_feature_after_parser_error():
    """After a ParserError the same Parser instance must be reusable."""
    parser = Parser()
    # The ``` doc string below is never closed, so this parse must fail.
    with assert_raises(ParserError):
        parser.parse(TokenScanner('# a comment\n' +
                                  'Feature: Foo\n' +
                                  '  Scenario: Bar\n' +
                                  '    Given x\n' +
                                  '      ```\n' +
                                  '      unclosed docstring\n'))
    # A well-formed document parsed afterwards must succeed.
    feature = parser.parse(TokenScanner('Feature: Foo\n' +
                                        '  Scenario: Bar\n' +
                                        '    Given x\n'
                                        '      """\n'
                                        '      closed docstring\n'
                                        '      """\n'))
    # NOTE(review): the in-string indentation was reconstructed from the
    # expected location columns below -- confirm against the original file.
    expected = [{
        'name': u'Bar',
        'keyword': u'Scenario',
        'tags': [],
        'steps': [{
            'text': u'x',
            'type': 'Step',
            'location': {'column': 5, 'line': 3},
            'keyword': u'Given ',
            'argument': {
                'content': u'closed docstring',
                'type': 'DocString',
                'location': {'column': 7, 'line': 4}}}],
        'location': {'column': 3, 'line': 2},
        'type': 'Scenario'}]
    assert_equals(expected, feature['children'])
def create_gherkins_from_threats(threats):
    """Render a gherkin scenario for every threat and keep the valid ones."""
    scenarios = Scenarios.stride
    gherkins = []
    for threat_class, class_threats in threats.items():
        template = scenarios[threat_class]
        for threat in class_threats:
            threat_gherkin = template.substitute(
                process=threat['process'],
                source=threat['source'],
                sourceZone=threat['sourceZone'],
                destination=threat['destination'],
                destinationZone=threat['destinationZone'])
            # Validate the rendered scenario by parsing it inside a
            # minimal feature wrapper; discard it if parsing fails.
            parser = Parser()
            feature_base = Base.feature_base
            try:
                parser.parse(
                    TokenScanner(
                        feature_base.substitute(component="None",
                                                scenario=threat_gherkin)))
            except CompositeParserException:
                print("Invalid gherkin template created: {}".format(
                    threat_gherkin))
            else:
                gherkins.append(threat_gherkin)
    return gherkins
def read_and_handle(filename, display_filename=None):
    """Parse *filename* and feed the document to a Handler.

    :param filename: path of the feature file to read
    :param display_filename: optional name to report instead of *filename*
    :return: the handler's collected scenarios
    """
    shown_name = display_filename if display_filename else filename
    parser = Parser()
    handler = Handler(filename=shown_name)
    with open(filename, 'r') as fp:
        source = fp.read()
    handler.handle(parser.parse(source))
    return handler.scenarios
def test_parse_multiple_features():
    """A single Parser can be reused for several documents."""
    parser = Parser()
    first = parser.parse(TokenScanner("Feature: 1"))
    second = parser.parse(TokenScanner("Feature: 2"))
    assert_equals("1", first['feature']['name'])
    assert_equals("2", second['feature']['name'])
def test_should_count_tags(self):
    """TagCountFormatter must map each tag to the file:line of its uses."""
    tag_counts = {}
    dummy = SexpRecorder()
    formatter = TagCountFormatter(dummy, tag_counts)
    parser = Parser(formatter)
    # Fixture lives in the shared spec/gherkin directory of the repo.
    here = os.path.dirname(__file__)
    fixtures = os.path.join(here, '..', '..', '..', 'spec', 'gherkin')
    path = os.path.join(fixtures, 'fixtures', 'complex_with_tags.feature')
    gherkin = open(path).read()
    # 'f.feature' is the display name used in the expected keys below.
    parser.parse(gherkin, 'f.feature', 0)
    tools.eq_(tag_counts, {
        u"@hamster": ["f.feature:58"],
        u"@tag1": ["f.feature:18", "f.feature:23", "f.feature:39",
                   "f.feature:52", "f.feature:58"],
        u"@tag2": ["f.feature:18", "f.feature:23", "f.feature:39",
                   "f.feature:52", "f.feature:58"],
        u"@tag3": ["f.feature:18", "f.feature:23"],
        u"@tag4": ["f.feature:18"],
        u"@neat": ["f.feature:52"],
        u"@more": ["f.feature:52", "f.feature:58"]
    })
def info(input, verbose, pyformat, **kwargs):
    """ Provides info about the input. Requires valid input. """
    if not input:
        input = "-"  # click convention: "-" reads from stdin
    with click.open_file(input, mode="rb") as f:
        parser = Parser()
        feature_text = f.read()
        feature = parser.parse(feature_text)
    metrics = {}
    # Collect Step / Scenario nodes; a[-1] is presumably the node owning
    # the matched key/value pair -- confirm against walk_items().
    steps = [a[-1] for d, k, v, a in walk_items(feature)
             if k == "type" and v == "Step"]
    scenarios = [a[-1] for d, k, v, a in walk_items(feature)
                 if k == "type" and v == "Scenario"]
    # tables = [a[-1] for d, k, v, a in walk_items(feature) if k == 'type' and v == 'DataTable']
    # Frequency of node types and of gherkin keywords across the document.
    ctr_type = Counter((v for d, k, v in walk_items(feature, ancestors=False)
                        if k == "type"))
    ctr_kw = Counter((v for d, k, v in walk_items(feature, ancestors=False)
                      if k == "keyword"))
    metrics.update({"count": {"Keywords": ctr_kw, "Types": ctr_type}})
    metrics.update({"content": {"Scenarios": [d["name"] for d in scenarios],
                                "Steps": [d["text"] for d in steps]}})
    data = metrics
    if verbose:
        # Expose the parsed object's type and member names for debugging.
        data["_object"] = {"type": type(feature),
                           "members": sorted(varsdict(feature).keys())}
    if pyformat:
        s = pformat(data)
    else:
        s = json.dumps(data, indent=2, sort_keys=True)
    click.echo(s)
def main(resources_path):
    """Walk *resources_path* and convert every ``.resource`` feature file
    into a plain dict structure suitable for NLP processing.

    :return: {resource_name: {'background' | scenario_name: step buckets}}
    """
    parser = Parser()
    nlp_ready_resources = {}
    for root, dirs, files in os.walk(resources_path):
        for file_name in files:
            if file_name.endswith('.resource'):
                resource = os.path.splitext(basename(file_name))[0]
                parsed_resource_file = parser.parse(
                    os.path.join(root, file_name))
                nlp_ready_resources[resource] = {}
                for child in parsed_resource_file['feature']['children']:
                    if child['type'] == 'Background':
                        # Background steps are all collected as Givens.
                        nlp_ready_resources[resource]['background'] = {}
                        nlp_ready_resources[resource]['background'][
                            'Given'] = []
                        for step in child['steps']:
                            sentence = step['keyword'] + step['text']
                            nlp_ready_resources[resource]['background'][
                                'Given'].append({'sentence': sentence})
                    elif child['type'] == 'Scenario':
                        # Keep Given/When/Then buckets in canonical order.
                        ordered_step_types = OrderedDict({
                            'Given': [],
                            'When': [],
                            'Then': []
                        })
                        ordered_step_types.move_to_end('When')
                        ordered_step_types.move_to_end('Then')
                        nlp_ready_resources[resource][
                            child['name']] = ordered_step_types
                        # Tracks the current keyword so continuation steps
                        # (e.g. And) land in the preceding bucket.
                        in_step = ''
                        for step in child['steps']:
                            data_table = []
                            sentence = step['keyword'] + step['text']
                            if step['keyword'] == 'When ' or step[
                                    'keyword'] == 'Then ' or step[
                                    'keyword'] == 'Given ':
                                in_step = step['keyword'].strip(
                                )  # note: there is a space here after the keyword
                            if 'argument' in step:
                                if step['argument']['type'] == 'DataTable':
                                    data_table = parse_table(step)
                            # Given steps carry no data table in the output.
                            if not in_step == 'Given':
                                nlp_ready_resources[resource][
                                    child['name']][in_step].append({
                                        'sentence': sentence,
                                        'data_table': data_table
                                    })
                            else:
                                nlp_ready_resources[resource][
                                    child['name']][in_step].append(
                                        {'sentence': sentence})
    return nlp_ready_resources
def get_scenario(feature_path, line):
    """Return the scenario of *feature_path* that starts at *line*.

    :param feature_path: path to the feature file
    :param line: line number where the wanted scenario starts
    :return: the matching scenario object, or None if none starts there
    """
    with open(feature_path) as fp:
        # A freshly opened file is already at offset 0, so the original
        # fp.seek(0) call was redundant and has been removed.
        print(feature_path)
        parser = Parser()
        feature_file = parser.parse(TokenScanner(fp.read()))
    scenarios = get_scenarios(feature_file['feature']['children'])
    for each in scenarios:
        if each.line == line:
            return each
    return None
def get_feature(file_path: str):
    """Read and parse the given feature file (Portuguese keywords).

    :param file_path: path to a ``.feature`` file
    :return: the parsed gherkin document
    :raises Exception: wrapping the original error when the file cannot
        be read or parsed
    """
    try:
        with open(file_path, "r", encoding='utf8') as file_obj:
            stream = file_obj.read()
        parser = Parser()
        response = parser.parse(TokenScanner(stream),
                                token_matcher=TokenMatcher('pt'))
    except Exception as error:
        # Chain the original exception so the root cause is not lost
        # (the original swallowed it entirely).
        raise Exception('Erro in read feature file, verify the file: ' +
                        file_path) from error
    return response
def parse_one(filename):
    """Parse a single feature file.

    :return: (type_name, None) on success, or (None, wrapped_error) when
        opening or parsing fails.
    """
    try:
        with open(filename, 'r') as fp:
            text = fp.read()
        try:
            doc = Parser().parse(TokenScanner(text))
        except Exception as err:
            return None, ExceptionWrapper.wrap(f'Parse: (unknown)', err)
        return str(type(doc)), None
    except Exception as err:
        return None, ExceptionWrapper.wrap(f'Open: (unknown)', err)
def __init__(self, file=None, raw_text=None):
    """Parse *file* into a gherkin document and compile its pickles.

    :raises GherkinError: when parsing fails or no pickles are produced.
    """
    self.raw_text = raw_text
    self.file = file
    scanner = TokenScanner(self.file)
    parser = Parser()
    try:
        self.gherkin_document = parser.parse(scanner)
        self.pickles = compiler.compile(self.gherkin_document)
        if not self.pickles:
            raise GherkinError("no pickles found!")
    except Exception as e:
        raise GherkinError("unable to parse / pickle doc {doc}".format(
            doc=self.file)) from e
def __init__(self, methods, webdriver):
    """Initialise the runner state.

    :param methods: step implementations used when executing scenarios
        -- presumably looked up by step text; confirm against callers.
    :param webdriver: browser driver handed to executed steps
    """
    self.features = []
    self.queue_list = []
    self.methods = methods
    self.parser = Parser()  # gherkin parser reused for every feature file
    self.webdriver = webdriver
    # Pass/fail counters accumulated over a run.
    self.passed_steps = 0
    self.passed_scenarios = 0
    self.failed_steps = 0
    self.failed_scenarios = 0
    # Flags: @wip filtering and first-scenario special-casing.
    self.wip_tag_flag = False
    self.first_scenario_flag = True
    self.log = ''  # accumulated textual run log
def test_parser():
    """A minimal feature parses into the expected flat (legacy) AST."""
    ast = Parser().parse(TokenScanner("Feature: Foo"))
    expected = {
        'comments': [],
        'keyword': u'Feature',
        'language': 'en',
        'location': {'column': 1, 'line': 1},
        'name': u'Foo',
        'children': [],
        'tags': [],
        'type': 'Feature',
    }
    assert_equals(expected, ast)
def test_change_the_default_language():
    """A TokenMatcher('no') makes the parser accept Norwegian keywords."""
    matcher = TokenMatcher('no')
    ast = Parser().parse(TokenScanner("Egenskap: i18n support - åæø"), matcher)
    expected = {
        'comments': [],
        'keyword': u'Egenskap',
        'language': 'no',
        'location': {'column': 1, 'line': 1},
        'name': u'i18n support - åæø',
        'children': [],
        'tags': [],
        'type': 'Feature',
    }
    assert_equals(expected, ast)
def feature(path):
    """Render a feature file as HTML, inlining referenced example JSON.

    For every examples-table row that contains an endpoint cell (value
    starting with '/') and a '*.json' cell, the JSON file located next
    to the feature file is loaded and attached to the cell as 'json'.
    """
    try:
        with open(os.path.join(BASE_DIR, path), "r") as file:
            data = file.read()
    except FileNotFoundError:
        return "Not found"
    parser = Parser()
    parsed_data = parser.parse(data)
    # Feature descriptions are treated as markdown and rendered to HTML.
    parsed_data["feature"]["description"] = mistune.markdown(
        parsed_data["feature"]["description"])
    # Mutate a deep copy so iteration over the original children is stable.
    new_feature_children = copy.deepcopy(parsed_data["feature"]["children"])
    for part_i, part in enumerate(parsed_data["feature"]["children"]):
        if ("examples" in part):
            for table_i, table in enumerate(part["examples"]):
                for row_i, row in enumerate(table["tableBody"]):
                    endpoint = ""
                    for cell_i, cell in enumerate(row["cells"]):
                        json_name = ""
                        if cell["value"].startswith("/"):
                            endpoint = cell["value"].replace("/", "")
                        if cell["value"].endswith(".json"):
                            json_name = cell["value"]
                        if endpoint and json_name:
                            try:
                                # remove .feature part of path (last one) and try to get JSON there
                                with open(
                                        os.path.join(
                                            BASE_DIR,
                                            "/".join(path.split("/")[:-1]),
                                            json_name), "r") as file:
                                    data = file.read()
                                new_feature_children[part_i]["examples"][
                                    table_i]["tableBody"][row_i]["cells"][
                                        cell_i]["json"] = data
                            except FileNotFoundError:
                                # Missing JSON is non-fatal: cell stays bare.
                                pass
    parsed_data["feature"]["children"] = new_feature_children
    return render_template('feature.html', data=parsed_data)
def __init__(self, file_path, commit=None, test_dir=None,
             strip_extension=False):
    """
    :param file_path: Path to a feature file within the Git repository.
    :type file_path: str
    :param commit: Commit SHA at which to view the Feature file
    :type commit: str
    :param test_dir: Part of the file path to strip from the beginning of
        the file path when returning the name of the test definition.
    :type test_dir: str
    :param strip_extension: Boolean flag to control stripping file
        extensions from feature test definitions
    :type strip_extension: bool
    """
    self.file_path = file_path
    self.commit = commit
    self.test_dir = test_dir
    self.strip_extension = strip_extension
    # Parse the file content as it existed at the requested commit.
    with checkout(self.file_path, self.commit) as file_handle:
        parser = Parser()
        self._original_definition = parser.parse(file_handle.read())
    # process scenarios
    self.definition = deepcopy(self._original_definition)
    # gherkin.parser is changing the key name for the scenarios from
    # 'scenarioDefinitions' to 'children'. Handle both for now.
    if 'children' in self.definition:
        child_key = 'children'
    elif 'scenarioDefinitions' in self.definition:
        child_key = 'scenarioDefinitions'
    else:
        child_key = None
    self.scenarios = {}
    if child_key:
        children = self.definition[child_key]
        for child in children:
            if child['type'].lower() == 'scenario':
                scenario = Scenario(child)
                self.scenarios[scenario.name] = scenario
        # Erase all scenarios from the definition in order
        # to compare the common elements in this Feature file
        # to the common elements in another feature file
        self.definition[child_key] = [
            x for x in children if x['type'].lower() != 'scenario']
def test_compiles_a_scenario():
    """Compiling a one-scenario document yields a single pickle."""
    source = textwrap.dedent(
        """\
        Feature: f
          Scenario: s
            Given passing
        """)
    id_generator = IdGenerator()
    document = Parser(AstBuilder(id_generator)).parse(source)
    document['uri'] = 'uri'
    pickle = Compiler(id_generator).compile(document)
    expected = json.loads(textwrap.dedent(
        """\
        [
          {
            "id": "3",
            "astNodeIds": ["1"],
            "name": "s",
            "language": "en",
            "steps": [
              {
                "id": "2",
                "astNodeIds": ["0"],
                "type": "Context",
                "text": "passing"
              }
            ],
            "tags": [],
            "uri": "uri"
          }
        ]
        """))
    assert pickle == expected
def test_compiles_a_scenario_outline_with_i18n_characters():
    """Outline titles and steps with non-ASCII text compile cleanly."""
    source = textwrap.dedent("""\
        Feature: f
          Scenario Outline: with 'é' in title
            Given <with-é>
            Examples:
            | with-é  |
            | passing |
        """)
    id_generator = IdGenerator()
    document = Parser(AstBuilder(id_generator)).parse(source)
    document['uri'] = 'uri'
    pickle = Compiler(id_generator).compile(document)
    expected = json.loads(textwrap.dedent("""\
        [
          {
            "id": "6",
            "astNodeIds": ["4", "2"],
            "name": "with 'é' in title",
            "language": "en",
            "steps": [
              {
                "id": "5",
                "astNodeIds": ["0", "2"],
                "text": "passing"
              }
            ],
            "tags": [],
            "uri": "uri"
          }
        ]
        """))
    assert_equals(pickle, expected)
def create_feature_file_for_gherkins(feature, gherkins):
    """Assemble a feature file from a header plus scenario snippets.

    The assembled text is parsed once so an invalid document raises
    before it is returned.
    """
    header = Base.feature_bare.substitute(component=feature)
    feature_file = header + ''.join(gherkins)
    Parser().parse(TokenScanner(feature_file))
    return feature_file
class GherkinEvents:
    """Turns gherkin source events into source / AST / pickle events."""

    def __init__(self, options):
        # options carries the print_source / print_ast / print_pickles flags.
        self.options = options
        self.id_generator = IdGenerator()
        self.parser = Parser(ast_builder=AstBuilder(self.id_generator))
        self.compiler = Compiler(self.id_generator)

    def enum(self, source_event):
        """Yield the events requested by the options for one source event.

        Parser/compiler errors are converted into error events via
        create_errors() instead of propagating to the caller.
        """
        uri = source_event['source']['uri']
        source = source_event['source']['data']
        try:
            gherkin_document = self.parser.parse(source)
            gherkin_document['uri'] = uri
            if (self.options.print_source):
                yield source_event
            if (self.options.print_ast):
                yield {'gherkinDocument': gherkin_document}
            if (self.options.print_pickles):
                pickles = self.compiler.compile(gherkin_document)
                for pickle in pickles:
                    yield {'pickle': pickle}
        except CompositeParserException as e:
            for event in create_errors(e.errors, uri):
                yield event
        except ParserError as e:
            for event in create_errors([e], uri):
                yield event
def test_parser():
    """A minimal feature parses into the expected document structure."""
    document = Parser().parse(TokenScanner("Feature: Foo"))
    expected = {
        'comments': [],
        'feature': {
            'keyword': u'Feature',
            'language': 'en',
            'location': {'column': 1, 'line': 1},
            'name': u'Foo',
            'description': '',
            'children': [],
            'tags': []
        },
    }
    assert_equals(expected, document)
def __init__(self, file=None, path=None, **defaults):
    """Parse the feature at *path* and compile its pickles.

    :raises GherkinError: when parsing or pickling fails.
    """
    if file is None:
        file = self
    if path is None:
        path = file.path
    super().__init__(path=path, **defaults)
    self.file = file
    self.path = file.path
    scanner = TokenScanner(self.path)
    parser = Parser()
    try:
        self.gherkin_document = parser.parse(scanner)
        self.pickles = compiler.compile(self.gherkin_document)
    except Exception as e:
        raise GherkinError("unable to parse / pickle doc {doc}".format(
            doc=self.path)) from e
def parse(content: str) -> GherkinDocument:
    """Parse the content of a file to an AST."""
    try:
        raw_document = Parser().parse(content)
    except ParserError as e:
        raise InvalidInput(e) from e
    try:
        return converter.structure(raw_document, GherkinDocument)
    except Exception as e:
        raise DeserializeError(f"{type(e).__name__}: {e}") from e
def collect(self):
    """Pytest collection hook: yield one item per scenario and per
    scenario-outline example set found in the feature file."""
    parser = Parser()
    with self.fspath.open() as handle:
        feature = parser.parse(handle.read())
    # Group the feature's children by type
    children = defaultdict(list)
    for child in feature["feature"].get("children", []):
        children[child["type"]].append(child)
    backgrounds = children.get("Background", [])
    self.obj = dict()
    for scenario_index, scenario_outline in enumerate(
        children["ScenarioOutline"]
    ):
        for example in self._get_example_sets(scenario_outline["examples"]):
            # Example values become part of the item name so each
            # combination is individually addressable.
            example_values = "-".join([v for d in example for v in d.values()])
            function = ScenarioOutline(
                name=scenario_outline["name"] + ": " + example_values,
                parent=self,
                spec=scenario_outline,
                scenario_index=scenario_index,
                example=example,
                backgrounds=backgrounds,
            )
            for mark in MARKS:
                function = getattr(pytest.mark, mark)(function)
            yield function
    # Plain scenarios are enumerated from -1000000, presumably so their
    # indices never collide with the outline indices above -- confirm.
    for scenario_index, scenario_outline in enumerate(
        children["Scenario"], -1000000
    ):
        function = ScenarioOutline(
            name=scenario_outline["name"],
            parent=self,
            spec=scenario_outline,
            scenario_index=scenario_index,
            backgrounds=backgrounds,
        )
        for mark in MARKS:
            function = getattr(pytest.mark, mark)(function)
        yield function
def test_parser():
    """A minimal feature parses into the expected AST (scenarioDefinitions era)."""
    ast = Parser().parse(TokenScanner("Feature: Foo"))
    expected = {
        'comments': [],
        'keyword': u'Feature',
        'language': 'en',
        'location': {'column': 1, 'line': 1},
        'name': u'Foo',
        'scenarioDefinitions': [],
        'tags': [],
        'type': 'Feature',
    }
    assert_equals(expected, ast)
def test_change_the_default_language():
    """A TokenMatcher('no') makes the parser accept Norwegian keywords."""
    matcher = TokenMatcher('no')
    document = Parser().parse(TokenScanner("Egenskap: i18n support - åæø"),
                              matcher)
    expected = {
        'comments': [],
        'feature': {
            'keyword': u'Egenskap',
            'language': 'no',
            'location': {'column': 1, 'line': 1},
            'name': u'i18n support - åæø',
            'description': '',
            'children': [],
            'tags': []
        },
    }
    assert_equals(expected, document)
def test_parse_feature_after_parser_error():
    """After a ParserError the same Parser instance must be reusable."""
    parser = Parser()
    # The ``` doc string below is never closed, so this parse must fail.
    with assert_raises(ParserError):
        parser.parse(
            TokenScanner('# a comment\n' +
                         'Feature: Foo\n' +
                         '  Scenario: Bar\n' +
                         '    Given x\n' +
                         '      ```\n' +
                         '      unclosed docstring\n'))
    # A well-formed document parsed afterwards must succeed.
    feature = parser.parse(
        TokenScanner('Feature: Foo\n' +
                     '  Scenario: Bar\n' +
                     '    Given x\n'
                     '      """\n'
                     '      closed docstring\n'
                     '      """\n'))
    # NOTE(review): the in-string indentation was reconstructed from the
    # expected location columns below -- confirm against the original file.
    expected = [{
        'name': u'Bar',
        'keyword': u'Scenario',
        'tags': [],
        'steps': [{
            'text': u'x',
            'type': 'Step',
            'location': {
                'column': 5,
                'line': 3
            },
            'keyword': u'Given ',
            'argument': {
                'content': u'closed docstring',
                'type': 'DocString',
                'location': {
                    'column': 7,
                    'line': 4
                }
            }
        }],
        'location': {
            'column': 3,
            'line': 2
        },
        'type': 'Scenario'
    }]
    assert_equals(expected, feature['scenarioDefinitions'])
def verify_filter(self, filters, *line_ranges):
    """Run the fixture through a FilterFormatter and check that exactly
    the lines in *line_ranges* (1-based, inclusive pairs) are emitted."""
    io = StringIO.StringIO()
    pretty_formatter = PrettyFormatter(io, True, False)
    filter_formatter = FilterFormatter(pretty_formatter, filters)
    parser = Parser(filter_formatter)
    path = os.path.dirname(__file__)
    path = os.path.join(path, '..', '..', '..', 'spec', 'gherkin')
    path = os.path.join(path, 'fixtures', self.file)
    # The EOF marker is appended so the last element is terminated;
    # presumably it forces the formatter to flush -- it is stripped
    # from the expectation below.
    source = open(path).read() + "# __EOF__"
    parser.parse(source, path, 0)
    source_lines = source.split('\n')
    expected = []
    for line_range in line_ranges:
        expected.extend(source_lines[line_range[0] - 1:line_range[1]])
    expected = '\n'.join(expected)
    expected = expected.replace('# __EOF__', '')
    tools.eq_(io.getvalue(), expected)
def parse(cls, string=None, filename=None, language=None):
    """Parse either a string or a file."""
    parser = Parser()
    if not language:
        token_matcher = TokenMatcher()
    else:
        if language == "pt-br":
            language = "pt"
        token_matcher = LanguageTokenMatcher(language)
    try:
        document = parser.parse(string or filename,
                                token_matcher=token_matcher)
        return cls(document, filename=filename)
    except ParserError as ex:
        raise AloeSyntaxError(filename, str(ex))
def test_change_the_default_language():
    """A TokenMatcher('no') parses Norwegian keywords (legacy flat AST)."""
    matcher = TokenMatcher('no')
    ast = Parser().parse(TokenScanner("Egenskap: i18n support - åæø"), matcher)
    expected = {
        'comments': [],
        'keyword': u'Egenskap',
        'language': 'no',
        'location': {'column': 1, 'line': 1},
        'name': u'i18n support - åæø',
        'scenarioDefinitions': [],
        'tags': [],
        'type': 'Feature',
    }
    assert_equals(expected, ast)
def parse(cls, string=None, filename=None, language=None):
    """Parse either a string or a file."""
    parser = Parser()
    if not language:
        token_matcher = TokenMatcher()
    else:
        if language == 'pt-br':
            language = 'pt'
        token_matcher = LanguageTokenMatcher(language)
    try:
        document = parser.parse(string or filename,
                                token_matcher=token_matcher)
        return cls(
            document,
            filename=filename,
        )
    except ParserError as ex:
        raise AloeSyntaxError(filename, str(ex))
def parse(input, pyformat, sort_keys, **kwargs):
    """ Converts the input into a structured object hierarchy. Requires valid input. """
    source = input if input else "-"
    with click.open_file(source, mode="rb") as f:
        document = Parser().parse(f.read())
    data = dict(document)
    if pyformat:
        rendered = pformat(data)
    else:
        rendered = json.dumps(data, indent=2, sort_keys=sort_keys)
    click.echo(rendered)
def read_feature(feature_path):
    """
    Read a specific feature

    :param feature_path: path of the file that contains the feature
    :return: Feature object

    TODO: Refactor to use this method into for loop in read_all_bdds() method
    """
    feature = Feature()
    with open(feature_path) as fp:
        # A freshly opened file is already at offset 0, so the original
        # fp.seek(0) call was redundant and has been removed.
        print(feature_path)
        parser = Parser()
        feature_file = parser.parse(TokenScanner(fp.read()))
    parsed = feature_file['feature']
    feature.feature_name = parsed['name']
    feature.language = parsed['language']
    feature.path_name = feature_path
    feature.tags = parsed['tags']
    feature.line = parsed['location']['line']
    feature.scenarios = get_scenarios(parsed['children'])
    return feature
def test_compiles_a_scenario_outline_with_i18n_characters():
    """Legacy compiler: an outline with non-ASCII text expands to one pickle."""
    # NOTE(review): the in-string indentation was reconstructed from the
    # pickle locations below (scenario col 3, step col 5, example row
    # line 6 col 5) -- confirm against the original file.
    feature_text = textwrap.dedent("""\
        Feature: f
          Scenario Outline: with 'é' in title
            Given <with-é>
            Examples:
            | with-é  |
            | passing |
        """)
    output = Parser().parse(feature_text)
    pickle = compiler.compile(output, 'features/hello.feature')
    # Each expanded step records two locations: the example row it was
    # expanded from and the outline step it came from.
    expected_pickle = textwrap.dedent("""\
        [
          {
            "name": "Scenario: with 'é' in title",
            "steps": [
              {
                "text": "passing",
                "arguments": [],
                "locations": [
                  {
                    "line": 6,
                    "column": 5,
                    "path": "features/hello.feature"
                  },
                  {
                    "line": 3,
                    "column": 11,
                    "path": "features/hello.feature"
                  }
                ]
              }
            ],
            "tags": [],
            "locations": [
              {
                "line": 6,
                "column": 5,
                "path": "features/hello.feature"
              },
              {
                "line": 2,
                "column": 3,
                "path": "features/hello.feature"
              }
            ]
          }
        ]
        """)
    assert_equals(pickle, json.loads(expected_pickle))
def read_all_bdds(url):
    """Parse every ``.feature`` file under ``<url>/features/desktop/``.

    :param url: repository root containing the features directory
    :return: list of Feature objects, one per feature file
    """
    features = []
    for root, dirs, files in os.walk(url + '/features/desktop/'):
        for file in files:
            if file.endswith(".feature"):
                feature = Feature()
                file_path = os.path.join(root, file)
                with open(file_path) as fp:
                    # A freshly opened file is already at offset 0, so the
                    # original fp.seek(0) call was redundant and removed.
                    print(file_path)
                    parser = Parser()
                    feature_file = parser.parse(TokenScanner(fp.read()))
                parsed = feature_file['feature']
                feature.feature_name = parsed['name']
                feature.language = parsed['language']
                feature.path_name = file_path
                feature.tags = parsed['tags']
                feature.line = parsed['location']['line']
                feature.scenarios = get_scenarios(parsed['children'])
                features.append(feature)
    return features
class GherkinEvents:
    """Maps a source event to source / AST / pickle events (legacy protocol)."""

    def __init__(self, options):
        # options carries the print_source / print_ast / print_pickles flags.
        self.options = options
        self.parser = Parser()

    def enum(self, source_event):
        """Return the list of events produced for one source event.

        Parse errors become error events via add_errors() instead of
        propagating to the caller.
        """
        uri = source_event['uri']
        source = source_event['data']
        events = []
        try:
            gherkin_document = self.parser.parse(source)
            if (self.options.print_source):
                events.append(source_event)
            if (self.options.print_ast):
                events.append({
                    'type': 'gherkin-document',
                    'uri': uri,
                    'document': gherkin_document
                })
            if (self.options.print_pickles):
                # NOTE(review): compile here shadows the builtin -- it is
                # presumably the gherkin pickle compiler imported elsewhere.
                pickles = compile(gherkin_document)
                for pickle in pickles:
                    events.append({
                        'type': 'pickle',
                        'uri': uri,
                        'pickle': pickle
                    })
        except CompositeParserException as e:
            add_errors(events, e.errors, uri)
        except ParserError as e:
            add_errors(events, [e], uri)
        return events
def test_should_raise_when_feature_does_not_parse(self):
    """Two Feature: headers in one document must raise ParseError."""
    p = Parser(Mock(PrettyFormatter))
    with tools.assert_raises(ParseError):
        # The line offset passed to parse() is derived from the current
        # source line at runtime, so reported error positions track this
        # test file's layout.
        p.parse(u"Feature: f\nFeature: f", __file__,
                inspect.currentframe().f_back.f_lineno - 1)
def __init__(self, options):
    """Store the CLI options and create a reusable gherkin Parser."""
    self.options = options
    self.parser = Parser()
def __init__(self, stepfile_filepath):
    """Load step definitions from *stepfile_filepath* and create a parser."""
    self._load_step_definitions(stepfile_filepath)
    self.gherkinparser = GherkinParser()
class Foxpath():
    """Loads step definitions and evaluates gherkin feature texts against
    activities."""

    # Step-definition registry: (compiled_regex, function) pairs.
    # Class-level so the loaded step file can populate it on import.
    mappings = []

    def __init__(self, stepfile_filepath):
        self._load_step_definitions(stepfile_filepath)
        self.gherkinparser = GherkinParser()

    def _load_step_definitions(self, filepath):
        """Import *filepath* so it (re)populates Foxpath.mappings."""
        Foxpath.mappings = []
        # remarkably, this seems to be sufficient
        if six.PY3:
            SourceFileLoader('', filepath).load_module()
        else:
            load_source('', filepath)

    def load_feature(self, feature_txt, codelists={}, today=None):
        """Compile a feature text into (feature_name, [(test_name, fn)]).

        :param today: optional 'YYYY-MM-DD' string; defaults to today's date.
        NOTE(review): ``codelists={}`` is a shared mutable default -- safe
        only as long as it is never mutated; confirm.
        """
        if today:
            today = datetime.strptime(today, '%Y-%m-%d').date()
        else:
            today = datetime.today().date()
        kwargs = {
            'codelists': codelists,
            'today': today,
        }
        return self._gherkinify_feature(feature_txt, **kwargs)

    def _find_matching_expr(self, mappings, line):
        """Return (fn, groups) for the first regex matching *line*.

        Falls through (returning None implicitly) after printing a
        diagnostic when nothing matches.
        """
        for regex, fn in mappings:
            r = regex.match(line)
            if r:
                return fn, r.groups()
        print('I did not understand {}'.format(line))

    def _parse(self, ctx, **kwargs):
        """Build a closure that runs the steps in *ctx* on one activity.

        The closure returns (True, '') on a passing 'then' step,
        (False, explanation) on a failing 'then' step, and
        (None, explanation) when a precondition step fails.
        """
        def __parse(activity):
            for step_type, expr_fn, expr_groups in ctx:
                result = True
                try:
                    if expr_groups:
                        expr_fn(activity, *expr_groups, **kwargs)
                    else:
                        expr_fn(activity, **kwargs)
                except StepException as e:
                    result = False
                    explain = str(e)
                if step_type == 'then':
                    if result:
                        return True, ''
                    else:
                        return False, explain
                else:
                    if not result:
                        return None, explain
                    else:
                        pass
        return __parse

    def _gherkinify_feature(self, feature_txt, **kwargs):
        """Parse *feature_txt* and pair every scenario with its closure."""
        feature = self.gherkinparser.parse(feature_txt)
        feature = feature['feature']
        feature_name = feature['name']
        tests = []
        for test in feature['children']:
            test_name = test['name']
            test_steps = test['steps']
            ctx = []
            # Steps before the first 'then' are treated as preconditions.
            step_type = 'given'
            for step in test_steps:
                if step['keyword'].lower().strip() == 'then':
                    step_type = 'then'
                expr_fn, expr_groups = self._find_matching_expr(
                    Foxpath.mappings, step['text'])
                ctx.append((step_type, expr_fn, expr_groups))
            tests.append((test_name, self._parse(ctx, **kwargs)))
        return (feature_name, tests)
def parsing():
    """Handle ghenerate's command line arguments.

    :return: 0 for the informational exits, otherwise a dict with the
        parsed feature's children and the generator settings.
    :raises Exception: when the feature file cannot be read or parsed,
        or contains no scenarios.
    """
    descr = 'Ghenerate, the Gherkin Python Step Generator from Quantarhei'
    parser = argparse.ArgumentParser(description=descr+' ...')
    parser.add_argument("file", metavar='file', type=str,
                        help='feature file to be processed', nargs='?')

    #
    # Generator options
    #
    parser.add_argument("-v", "--version", action="store_true",
                        help="shows Quantarhei package version")
    parser.add_argument("-i", "--info", action='store_true',
                        help="shows detailed information about Quantarhei" +
                        " installation")
    parser.add_argument("-d", "--destination", type=str,
                        help="specifies destination directory for the" +
                        " generated step file")
    parser.add_argument("-n", "--no-pass", action="store_true",
                        help="empty tests should not pass (default is" +
                        " passing empty tests)")
    parser.add_argument("-f", "--start-from", type=int,
                        help="step functions will be numberred starting" +
                        " from this value")

    #
    # Parsing all arguments
    #
    args = parser.parse_args()

    #
    # show longer info
    #
    if args.info:
        qr.printlog("\n" +
                    "ghenerate: Quantarhei Gherkin Python Step Generator\n",
                    verbose=True, loglevel=0)
        if not args.version:
            qr.printlog("Package version: ", qr.Manager().version, "\n",
                        verbose=True, loglevel=0)
        return 0

    #
    # show just Quantarhei version number
    #
    if args.version:
        qr.printlog("Quantarhei package version: ", qr.Manager().version,
                    "\n", verbose=True, loglevel=0)
        return 0

    ddir = args.destination if args.destination else "ghen"

    if args.file:
        print("")
        print(descr+" ...")
        filename = args.file
    else:
        print("No file specified: quiting")
        parser.print_help()
        return 0

    steps_pass = not args.no_pass
    k_from = args.start_from if args.start_from else 0

    # The original used bare ``except:`` clauses, which also swallow
    # KeyboardInterrupt/SystemExit and lose the root cause; narrowed and
    # chained with ``from`` below.
    try:
        with open(filename, 'r') as myfile:
            data = myfile.read()
    except OSError as err:
        raise Exception("Problems reading file: "+filename) from err

    # Renamed from ``parser`` to avoid shadowing the argparse parser above.
    gherkin_parser = Parser()
    try:
        feature_file = gherkin_parser.parse(TokenScanner(data))
    except Exception as err:
        # NOTE(review): this message was split across source lines in the
        # original; reconstructed as one sentence -- confirm wording.
        raise Exception("Problem parsing file: "+filename +
                        " - is it a feature file?") from err

    try:
        children = feature_file["feature"]["children"]
    except KeyError as err:
        raise Exception("No scenarii or scenario outlines") from err

    return dict(children=children, ddir=ddir, steps_pass=steps_pass,
                filename=filename, k_from=k_from)