def test_parse_multiple_features():
    """The same Parser instance can be reused for several documents."""
    parser = Parser()
    first_doc = parser.parse(TokenScanner("Feature: 1"))
    second_doc = parser.parse(TokenScanner("Feature: 2"))
    assert_equals("1", first_doc['feature']['name'])
    assert_equals("2", second_doc['feature']['name'])
def test_parse_feature_after_parser_error():
    # Regression test: a Parser must reset its internal state after a failed
    # parse so the same instance can parse a valid document afterwards.
    parser = Parser()
    with assert_raises(ParserError):
        # The ``` doc string opened under "Given x" is never closed, so the
        # parser hits end-of-input mid doc string and raises.
        parser.parse(TokenScanner('# a comment\n' +
                                  'Feature: Foo\n' +
                                  ' Scenario: Bar\n' +
                                  ' Given x\n' +
                                  ' ```\n' +
                                  ' unclosed docstring\n'))
    # Same parser instance, now with a well-formed document (closed """).
    feature_file = parser.parse(TokenScanner('Feature: Foo\n' +
                                             ' Scenario: Bar\n' +
                                             ' Given x\n'
                                             ' """\n'
                                             ' closed docstring\n'
                                             ' """\n'))
    # Expected AST fragment for the single scenario child of the feature.
    expected = [{'scenario': {
        'id': '1',
        'name': u'Bar',
        'description': '',
        'keyword': u'Scenario',
        'tags': [],
        'steps': [{
            'id': '0',
            'text': u'x',
            'location': {'column': 5, 'line': 3},
            'keyword': u'Given ',
            'docString': {
                'content': u'closed docstring',
                'delimiter': '"""',
                'location': {'column': 7, 'line': 4}}}],
        'location': {'column': 3, 'line': 2},
        'examples': []}}]
    assert_equals(expected, feature_file['feature']['children'])
def test_parse_multiple_features():
    """Parsing one document does not corrupt the parser for the next one."""
    parser = Parser()
    doc_one = parser.parse(TokenScanner("Feature: 1"))
    doc_two = parser.parse(TokenScanner("Feature: 2"))
    assert doc_one['feature']['name'] == "1"
    assert doc_two['feature']['name'] == "2"
def create_gherkins_from_threats(threats):
    """Render one Gherkin scenario per threat and keep only the valid ones.

    Each threat is substituted into its STRIDE scenario template; the result
    is validated by wrapping it in a bare feature and parsing it. Invalid
    renderings are reported and dropped.
    """
    gherkins = []
    templates = Scenarios.stride
    for threat_class, class_threats in threats.items():
        for threat in class_threats:
            threat_gherkin = templates[threat_class].substitute(
                process=threat['process'],
                source=threat['source'],
                sourceZone=threat['sourceZone'],
                destination=threat['destination'],
                destinationZone=threat['destinationZone'])
            # Wrap in a minimal feature so the scenario alone can be parsed.
            candidate = Base.feature_base.substitute(
                component="None", scenario=threat_gherkin)
            try:
                Parser().parse(TokenScanner(candidate))
            except CompositeParserException:
                print("Invalid gherkin template created: {}".format(
                    threat_gherkin))
            else:
                gherkins.append(threat_gherkin)
    return gherkins
def create_feature_file_for_gherkins(feature, gherkins):
    """Assemble a feature file from a feature header plus scenario snippets.

    Args:
        feature: component name substituted into the bare feature template.
        gherkins: iterable of Gherkin scenario strings to append.

    Returns:
        The assembled feature file text (validated by a parse pass).

    Raises:
        Whatever ``Parser.parse`` raises if the assembled text is invalid.
    """
    # str.join instead of += in a loop: one allocation instead of O(n^2).
    feature_file = Base.feature_bare.substitute(component=feature) + "".join(gherkins)
    # Parse once purely as validation; raises on invalid Gherkin.
    Parser().parse(TokenScanner(feature_file))
    return feature_file
def get_feature(file_path: str):
    """Read and parse given feature file.

    Args:
        file_path: path to a ``.feature`` file.

    Returns:
        The Gherkin document (dict) produced by ``Parser.parse``.
    """
    print('Reading feature file ', file_path)
    # Context manager so the handle is closed even if read/parse fails
    # (the original opened the file and never closed it).
    with open(file_path, "r") as file_obj:
        source = file_obj.read()
    parser = Parser()
    return parser.parse(TokenScanner(source))
def test_parse_feature_after_parser_error():
    # The parser must recover after a failed parse: the same instance is
    # reused below and must succeed on a valid document.
    parser = Parser()
    with assert_raises(ParserError):
        # The ``` doc string is never closed, so parsing fails at EOF.
        parser.parse(
            TokenScanner('# a comment\n' +
                         'Feature: Foo\n' +
                         ' Scenario: Bar\n' +
                         ' Given x\n' +
                         ' ```\n' +
                         ' unclosed docstring\n'))
    feature = parser.parse(
        TokenScanner('Feature: Foo\n' +
                     ' Scenario: Bar\n' +
                     ' Given x\n'
                     ' """\n'
                     ' closed docstring\n'
                     ' """\n'))
    # Expected legacy (flat) AST for the single scenario definition.
    expected = [{
        'name': u'Bar',
        'keyword': u'Scenario',
        'tags': [],
        'steps': [{
            'text': u'x',
            'type': 'Step',
            'location': {
                'column': 5,
                'line': 3
            },
            'keyword': u'Given ',
            'argument': {
                'content': u'closed docstring',
                'type': 'DocString',
                'location': {
                    'column': 7,
                    'line': 4
                }
            }
        }],
        'location': {
            'column': 3,
            'line': 2
        },
        'type': 'Scenario'
    }]
    assert_equals(expected, feature['scenarioDefinitions'])
def get_scenario(feature_path, line):
    """Return the scenario located at *line* in the feature file, or None."""
    with open(feature_path) as fp:
        fp.seek(0)
        parser = Parser()
        print(feature_path)
        document = parser.parse(TokenScanner(fp.read()))
    for scenario in get_scenarios(document['feature']['children']):
        if scenario.line == line:
            return scenario
    return None
def get_feature(file_path: str):
    """Read and parse given feature file using Portuguese ('pt') keywords.

    Args:
        file_path: path to a ``.feature`` file (UTF-8 encoded).

    Returns:
        The Gherkin document (dict) produced by ``Parser.parse``.

    Raises:
        Exception: if the file cannot be read or parsed; the original
            error is chained as the cause so it is not lost.
    """
    try:
        with open(file_path, "r", encoding='utf8') as file_obj:
            source = file_obj.read()
        parser = Parser()
        response = parser.parse(TokenScanner(source),
                                token_matcher=TokenMatcher('pt'))
    except Exception as error:
        # Chain the root cause ("from error") instead of discarding it.
        raise Exception('Erro in read feature file, verify the file: ' +
                        file_path) from error
    return response
def parse_one(filename):
    """Parse a single Gherkin file, reporting failures as wrapped errors.

    Args:
        filename: path of the feature file to parse.

    Returns:
        ``(description, None)`` on success, where *description* is
        ``str(type(...))`` of the parsed document, or ``(None, wrapped)``
        where *wrapped* tags the failing phase ("Open" vs "Parse").
    """
    # Read phase: any failure here is an "Open" error.
    try:
        with open(filename, 'r') as fp:
            text = fp.read()
    except Exception as err:
        # Plain literal: the original used an f-string with no placeholders.
        return None, ExceptionWrapper.wrap('Open: (unknown)', err)
    # Parse phase: any failure here is a "Parse" error.
    try:
        parser = Parser()
        doc = parser.parse(TokenScanner(text))
        return str(type(doc)), None
    except Exception as err:
        return None, ExceptionWrapper.wrap('Parse: (unknown)', err)
def __init__(self, file=None, raw_text=None):
    """Parse *file* into a Gherkin document and compile its pickles.

    Raises:
        GherkinError: if parsing/compiling fails or no pickles result.
    """
    self.raw_text = raw_text
    self.file = file
    # NOTE(review): TokenScanner is given self.file directly — presumably a
    # path or source string accepted by the gherkin package; confirm callers.
    parser = Parser()
    scanner = TokenScanner(self.file)
    try:
        self.gherkin_document = parser.parse(scanner)
        self.pickles = compiler.compile(self.gherkin_document)
        if len(self.pickles) < 1:
            # Raised inside the try, so this is itself caught below and
            # re-wrapped in a GherkinError with the original as cause.
            raise GherkinError("no pickles found!")
    except Exception as e:
        # Chain the underlying error so the root cause is preserved.
        raise GherkinError("unable to parse / pickle doc {doc}".format(
            doc=self.file)) from e
def ast_from_gherkin_file(fd: TextIO) -> dict:
    """Parse a file with a gherkin document into an Abstract Syntax Tree

    Parameters
    ----------
    fd : file-like object
        filedescriptor of file containing gherking document

    Returns
    -------
    ast : dict
        a dictionary representation of the gherkin file as returned by
        gherkin.parser.Parser.parse
    """
    # Rewind so the whole document is read regardless of prior reads.
    fd.seek(0)
    source = fd.read()
    parser = Parser()
    return parser.parse(TokenScanner(source))
def test_parser():
    """A minimal document parses into an empty feature node."""
    parser = Parser()
    document = parser.parse(TokenScanner("Feature: Foo"))
    expected_feature = {
        'keyword': u'Feature',
        'language': 'en',
        'location': {'column': 1, 'line': 1},
        'name': u'Foo',
        'description': '',
        'children': [],
        'tags': [],
    }
    assert_equals({'comments': [], 'feature': expected_feature}, document)
def __init__(self, file=None, path=None, **defaults):
    """Initialise from *file* (or self), then parse and pickle the document.

    Raises:
        GherkinError: if parsing or pickling the document fails.
    """
    # NOTE(review): when *file* is omitted this object acts as its own
    # file — presumably self.path is provided elsewhere; confirm callers.
    if file is None:
        file = self
    if path is None:
        path = file.path
    super().__init__(path=path, **defaults)
    self.file = file
    self.path = file.path
    # TokenScanner is given the path, not the file contents — the gherkin
    # scanner presumably opens the file itself; verify against its API.
    parser = Parser()
    scanner = TokenScanner(self.path)
    try:
        self.gherkin_document = parser.parse(scanner)
        self.pickles = compiler.compile(self.gherkin_document)
    except Exception as e:
        # Wrap any failure, preserving the root cause via "from e".
        raise GherkinError("unable to parse / pickle doc {doc}".format(
            doc=self.path)) from e
def test_change_the_default_language():
    """An explicit TokenMatcher switches keyword recognition to Norwegian."""
    parser = Parser()
    matcher = TokenMatcher('no')
    document = parser.parse(TokenScanner("Egenskap: i18n support - åæø"),
                            matcher)
    expected_feature = {
        'keyword': u'Egenskap',
        'language': 'no',
        'location': {'column': 1, 'line': 1},
        'name': u'i18n support - åæø',
        'description': '',
        'children': [],
        'tags': [],
    }
    assert_equals({'comments': [], 'feature': expected_feature}, document)
def test_parser():
    """A minimal document yields an empty Feature node (legacy flat AST)."""
    parser = Parser()
    result = parser.parse(TokenScanner("Feature: Foo"))
    expected = {
        'comments': [],
        'keyword': u'Feature',
        'language': 'en',
        'location': {'column': 1, 'line': 1},
        'name': u'Foo',
        'scenarioDefinitions': [],
        'tags': [],
        'type': 'Feature',
    }
    assert_equals(expected, result)
def test_change_the_default_language():
    """A Norwegian TokenMatcher recognises 'Egenskap' (legacy flat AST)."""
    parser = Parser()
    matcher = TokenMatcher('no')
    result = parser.parse(TokenScanner("Egenskap: i18n support - åæø"),
                          matcher)
    expected = {
        'comments': [],
        'keyword': u'Egenskap',
        'language': 'no',
        'location': {'column': 1, 'line': 1},
        'name': u'i18n support - åæø',
        'scenarioDefinitions': [],
        'tags': [],
        'type': 'Feature',
    }
    assert_equals(expected, result)
def read_feature(feature_path):
    """
    Read a specific feature
    :param feature_path: path of the file that contains the feature
    :return: Feature object

    TODO: Refactor to use this method into for loop in read_all_bdds() method
    """
    feature = Feature()
    with open(feature_path) as fp:
        fp.seek(0)
        print(feature_path)
        document = Parser().parse(TokenScanner(fp.read()))
    # Copy the parsed feature's metadata onto the Feature object.
    meta = document['feature']
    feature.feature_name = meta['name']
    feature.language = meta['language']
    feature.path_name = feature_path
    feature.tags = meta['tags']
    feature.line = meta['location']['line']
    feature.scenarios = get_scenarios(meta['children'])
    return feature
def read_all_bdds(url):
    """Parse every .feature file under <url>/features/desktop/ into Features."""
    features = []
    for root, dirs, files in os.walk(url + '/features/desktop/'):
        for name in files:
            if not name.endswith(".feature"):
                continue
            file_path = os.path.join(root, name)
            with open(file_path) as fp:
                fp.seek(0)
                print(file_path)
                document = Parser().parse(TokenScanner(fp.read()))
            # Copy the parsed feature's metadata onto a Feature object.
            meta = document['feature']
            feature = Feature()
            feature.feature_name = meta['name']
            feature.language = meta['language']
            feature.path_name = file_path
            feature.tags = meta['tags']
            feature.line = meta['location']['line']
            feature.scenarios = get_scenarios(meta['children'])
            features.append(feature)
    return features
def parse(self, cucumber_file):
    """Parse *cucumber_file* (Gherkin source text) into a Gherkin document.

    Args:
        cucumber_file: the Gherkin source to parse.

    Returns:
        dict: the parsed Gherkin document.
    """
    # self.backend.configure()
    parser = Parser()
    gherkin_document = parser.parse(TokenScanner(cucumber_file))
    # BUG FIX: the original fed the parsed AST dict back into
    # parser.parse(), which expects source/TokenScanner input, and then
    # discarded everything. Return the document instead (backward
    # compatible: callers ignoring the old None return are unaffected).
    return gherkin_document
def parseFeatureFile(path, files):
    """Parse the feature file named *files* inside directory *path*."""
    full_path = path + "/" + files
    return Parser().parse(TokenScanner(full_path))
def parseFeatureFile(feature_file):
    """Parse *feature_file* and return the resulting Gherkin document."""
    return Parser().parse(TokenScanner(feature_file))
def validate_feature_file(feature_file, unallowed_tags):
    """Validates a feature file.

    Args:
        feature_file_path: the path to the feature file.

    Returns:
        a list of errors.
    """
    # feature_file arrives as a (status, path) pair; only the path is used.
    file_status, feature_file_path = feature_file
    with open(feature_file_path, "r") as fp:
        contents = fp.read()
    parser = Parser()
    try:
        feature_file = parser.parse(TokenScanner(contents))
    except Exception as e:
        # A file that does not parse produces a two-line error report.
        return [
            "[ERROR] Errors exist in " + feature_file_path,
            "\t- Could not parse the file! " + str(e)
        ]
    errors = []
    feature_tag_names = [
        tag["name"] for tag in feature_file["feature"]["tags"]
    ]
    # Only scenario-like children participate in the checks below.
    scenarios = [
        feature_child
        for feature_child in feature_file["feature"]["children"]
        if feature_child['type'] == 'Scenario'
        or feature_child['type'] == 'ScenarioOutline'
    ]
    # validate tags in the feature
    for unallowed_tag in set(unallowed_tags).intersection(feature_tag_names):
        errors.append(
            "\t- Remove the %s tag from the feature before you commit"
            % unallowed_tag)
    # validate tags in all the scenarios
    for scenario in scenarios:
        for tag in scenario["tags"]:
            if tag["name"] in unallowed_tags:
                errors.append(
                    "\t- Before you commit, remove the %s tag from the following scenario:\n\t\t'%s'"
                    % (tag["name"], scenario["name"]))
    # validate scenario numbers
    # Scenario names appear to follow a "1.", "2.", "2a.", "2b.", "3." style
    # numbering scheme (TODO confirm against the feature files); each check
    # stops at the first violation found.
    prev_scenario_num = "0"
    for curr_scenario in scenarios:
        # validate presence of a "<number>." prefix
        if "." not in curr_scenario["name"]:
            errors.append(
                "\t- The following scenario needs to start with a number followed by a period: '%s'"
                % curr_scenario["name"])
            break
        curr_scenario_num = curr_scenario["name"].split(".")[0].strip()
        if not curr_scenario_num or curr_scenario_num.isalpha():
            errors.append(
                "\t- The following scenario needs to start with a number: '%s'"
                % curr_scenario["name"])
            break
        # validate ordering
        if prev_scenario_num.isdigit():
            # previous scenario didn't have a letter
            if curr_scenario_num.isdigit():
                # current scenario doesn't have a letter: must be prev + 1
                if int(curr_scenario_num) != int(prev_scenario_num) + 1:
                    errors.append(
                        "\t- The ordering of the scenarios breaks down on Scenario '%s'"
                        % curr_scenario_num)
                    break
            else:
                # current scenario has a letter: a new letter run starts at "a"
                if curr_scenario_num[-1] != "a":
                    errors.append(
                        "\t- The ordering of the scenarios breaks down on Scenario '%s'"
                        % curr_scenario_num)
                    break
        else:
            # previous scenario had a letter
            prev_scenario_letter = prev_scenario_num[-1]
            if curr_scenario_num.isdigit():
                # current scenario doesn't have a letter
                # NOTE(review): this branch compares ord() of the last digit
                # of a numeric token against the previous letter + 1, which
                # looks suspicious — verify the intended rule.
                if int(curr_scenario_num) != int(prev_scenario_num[:-1]) + 1:
                    if ord(curr_scenario_num[-1]
                           ) != ord(prev_scenario_letter) + 1:
                        errors.append(
                            "\t- The ordering of the scenarios breaks down on Scenario '%s'"
                            % curr_scenario_num)
                        break
            else:
                # current scenario has a letter
                if int(curr_scenario_num[:-1]) != int(
                        prev_scenario_num[:-1]) + 1:
                    # number has not been incremented: letter must advance by 1
                    if ord(curr_scenario_num[-1]
                           ) != ord(prev_scenario_letter) + 1:
                        errors.append(
                            "\t- The ordering of the scenarios breaks down on Scenario '%s'"
                            % curr_scenario_num)
                        break
                else:
                    # number has been incremented: letter run restarts at "a"
                    if curr_scenario_num[-1] != "a":
                        errors.append(
                            "\t- The ordering of the scenarios breaks down on Scenario '%s'"
                            % curr_scenario_num)
                        break
        prev_scenario_num = curr_scenario_num
    if errors:
        errors.insert(0, "[ERROR] Errors exist in " + feature_file_path)
    return errors
def parsing():
    """This function handles parsing command line arguments

    Returns 0 after informational actions (--info, --version, no file),
    otherwise a dict with the feature's children and generator settings.

    Raises:
        Exception: if the feature file cannot be read, parsed, or contains
            no scenarios; the root cause is chained in each case.
    """
    descr = 'Ghenerate, the Gherkin Python Step Generator from Quantarhei'
    parser = argparse.ArgumentParser(description=descr + ' ...')
    parser.add_argument("file", metavar='file', type=str,
                        help='feature file to be processed', nargs='?')

    #
    # Generator options
    #
    parser.add_argument("-v", "--version", action="store_true",
                        help="shows Quantarhei package version")
    parser.add_argument("-i", "--info", action='store_true',
                        help="shows detailed information about Quantarhei" +
                        " installation")
    parser.add_argument("-d", "--destination", type=str,
                        help="specifies destination directory for the" +
                        " generated step file")
    parser.add_argument("-n", "--no-pass", action="store_true",
                        help="empty tests should not pass (default is" +
                        " passing empty tests)")
    parser.add_argument("-f", "--start-from", type=int,
                        help="step functions will be numberred starting" +
                        " from this value")

    #
    # Parsing all arguments
    #
    args = parser.parse_args()

    #
    # show longer info
    #
    if args.info:
        qr.printlog("\n" +
                    "ghenerate: Quantarhei Gherkin Python Step Generator\n",
                    verbose=True, loglevel=0)
        if not args.version:
            qr.printlog("Package version: ", qr.Manager().version, "\n",
                        verbose=True, loglevel=0)
        return 0

    #
    # show just Quantarhei version number
    #
    if args.version:
        qr.printlog("Quantarhei package version: ", qr.Manager().version,
                    "\n", verbose=True, loglevel=0)
        return 0

    ddir = args.destination if args.destination else "ghen"

    if args.file:
        print("")
        print(descr + " ...")
        filename = args.file
    else:
        print("No file specified: quiting")
        parser.print_help()
        return 0

    steps_pass = not args.no_pass
    k_from = args.start_from if args.start_from else 0

    # Narrowed from a bare "except:" (which also swallowed KeyboardInterrupt
    # and SystemExit); OSError covers all file-access failures, and the
    # root cause is chained.
    try:
        with open(filename, 'r') as myfile:
            data = myfile.read()
    except OSError as err:
        raise Exception("Problems reading file: " + filename) from err

    parser = Parser()
    try:
        feature_file = parser.parse(TokenScanner(data))
    except Exception as err:
        raise Exception("Problem parsing file: " + filename +
                        " - is it a feature file?") from err

    # A document without a feature/children structure has nothing to generate.
    try:
        children = feature_file["feature"]["children"]
    except (KeyError, TypeError) as err:
        raise Exception("No scenarii or scenario outlines") from err

    return dict(children=children, ddir=ddir, steps_pass=steps_pass,
                filename=filename, k_from=k_from)
# Dump the token stream of each feature file given on the command line,
# using the gherkin package's TokenFormatterBuilder.
import codecs
import os
import sys

if sys.version_info < (3, 0):
    import codecs

# Make the repository root importable so the local gherkin package wins.
sys.path.insert(0,
                os.path.dirname(os.path.dirname(os.path.realpath(__file__))))

from gherkin.token_scanner import TokenScanner
from gherkin.token_formatter_builder import TokenFormatterBuilder
from gherkin.parser import Parser

# Every positional argument is treated as a feature-file path.
files = sys.argv[1:]

if sys.version_info < (3, 0) and os.name != 'nt':
    # for Python2 unless on Windows native: wrap stdout so that printing
    # non-ASCII token text does not raise UnicodeEncodeError.
    UTF8Writer = codecs.getwriter('utf8')
    sys.stdout = UTF8Writer(sys.stdout)

# A Parser built with TokenFormatterBuilder emits formatted tokens rather
# than an AST.
parser = Parser(TokenFormatterBuilder())
for file in files:
    scanner = TokenScanner(file)
    print(parser.parse(scanner))
# source: # https://stackoverflow.com/questions/43107367/how-to-use-gherkin-official # see also: # https://github.com/cucumber/gherkin-python/tree/master/test # https://github.com/cucumber-attic/gherkin from gherkin.token_scanner import TokenScanner from gherkin.parser import Parser parser = Parser() feature_file = parser.parse( TokenScanner(''' Feature: Foo Background: Scenario Outline: 12 Scenario Outline: 15 ''')) print(feature_file)