def __init__(self, grammarFile, startSym=None, lastStart=False, quiet=True, deferLinkage=False):
    """Parse a grammar file into Rule objects and select the start symbol.

    :param grammarFile: path to the grammar definition file; individual
        rules are terminated by ';' tokens.
    :param startSym: explicit start symbol; when None the first rule's
        LHS is used (or the last rule's, if ``lastStart`` is True).
    :param lastStart: when ``startSym`` is None, pick the last rule's LHS
        as the start symbol instead of the first.
    :param quiet: suppress per-rule progress printing when True.
    :param deferLinkage: when True, skip ``linkRules()`` so the caller can
        register external rule dicts before linking.
    :raises GrammarParsingError: if any text follows the final ';'.
    :raises GrammarError: propagated from Rule parsing after reporting
        the index of the offending rule.
    """
    self.quiet = quiet
    self.rules = None
    self.ruleDict = None
    self.start = None
    self.externalDependencies = []
    self._externalRuleDicts = {}
    self._deferLinkage = deferLinkage
    self._linkageDone = False
    self._additionalTokenTypes = TokenizerTypeList()

    # Read the whole grammar in one call (the original accumulated
    # line-by-line with +=, which is quadratic in file size).
    with open(grammarFile) as FILE:
        full_text = FILE.read()

    fullTokenizer = Tokenizer()
    fullTokenizer.tokenize(full_text)

    # Every rule must end with ';'; trailing text after the final
    # terminator means a rule was left unterminated.
    splitToken = Token(';', fullTokenizer.getTTL()['End'])
    if fullTokenizer.getLastToken() != splitToken:
        raise GrammarParsingError(
            "Found Text after last rule! Did you forget ';'?")

    ruleTokenizers = fullTokenizer.splitTokensOn(splitToken)

    i = -1  # rule index; pre-bound so the except blocks can't hit NameError
    try:
        self.rules = []
        for i, tokens in enumerate(ruleTokenizers):
            if not self.quiet:
                print("PARSING RULE:", i)
            nextRule = Rule(tokens)
            if nextRule.hasDependency():
                dependency = nextRule.getDependency()
                if dependency not in self.externalDependencies:
                    self.externalDependencies.append(dependency)
            if nextRule.isRegExRule():
                self._additionalTokenTypes.addTokenType(
                    nextRule.getRegExTT())
            self.rules.append(nextRule)
    except GrammarError as err:
        print("Exception thrown while parsing rule", i)
        print(err.message)
        raise  # bare raise preserves the original traceback
    except Exception:
        print("Unknown Exception thrown while parsing rule", i)
        raise

    self.ruleDict = {r.lhs().getValue(): r for r in self.rules}

    if not self._deferLinkage:
        self.linkRules()

    if startSym is None:
        # Default start symbol: first rule's LHS, or last rule's when
        # lastStart is set.
        index = -1 if lastStart else 0
        self.start = self.rules[index].lhs().getValue()
    else:
        self.setStart(startSym)