def symbols(g: Grammar) -> Symbols:
	"""Registers tokens and words that are shared by all the grammars defined in this module."""
	s = g.symbols
	tokens = {
		"WS": "[\s\n]+",
		"NUMBER": "[0-9]+(\.[0-9]+)?",
		"STRING_DQ": "\"[^\"]*\"",
		"QUERY_NODE": "(([a-z][\-a-z0-9]*):)?([a-z\*\?][\-a-z0-9\*\?]*)",
		"QUERY_ATTRIBUTE": "@[a-z\*\?]+[\-a-z0-9\*\?]*",
		"QUERY_VARIABLE": "[A-Z][_A-Z0-9]*",
		"QUERY_CURRENT_NODE": "\.+",
		"QUERY_SUBSET": "#(\d+)",
		"QUERY_AXIS_DESCENDANTS": "\\.?/(\d?/)?",
		"QUERY_AXIS_ANCESTORS": "\\.?\\\\(\d?\\\\)?",
		"QUERY_AXIS_BEFORE": "\\.?>(\d?>|-?\\=?\\+?>)?",
		"QUERY_AXIS_AFTER": "\\.?<(\d?<|-?\\=?\\+?<)?",
	}
	words = {
		"LP": "(",
		"COLON": ":",
		"RP": ")",
		"LB": "{",
		"RB": "}",
		"LS": "[",
		"RS": "]",
		"QUERY_ATTRIBUTE": "@",
		"QUERY_AXIS_SELF": "|",
	}
	groups = ("ExprValue", "ExprValuePrefix")
	return ParserUtils.EnsureSymbols(g, tokens, words, groups)
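# NOTE: As a quick, standalone illustration of the shapes these query tokens
# accept, the two patterns below are copied from the token table above
# (normalized to raw strings) and exercised with plain `re`; the sample
# strings are made-up examples, and the grammar machinery itself is not used.
import re
_QUERY_NODE = re.compile(r"(([a-z][\-a-z0-9]*):)?([a-z\*\?][\-a-z0-9\*\?]*)")
_QUERY_AXIS_DESCENDANTS = re.compile(r"\.?/(\d?/)?")
assert _QUERY_NODE.fullmatch("ns:node-name")
assert _QUERY_NODE.fullmatch("node-*")
assert _QUERY_AXIS_DESCENDANTS.fullmatch("./2/")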
def symbols(g: Grammar) -> Symbols:
	"""Registers tokens and words that are shared by all the grammars defined in this module."""
	tokens = {
		"WS": "\s+",
		"FORMAT_NAME": "[A-Z][_A-Z]*",
		"FORMAT_VARIANT": "\([A-Za-z][0-9A-Z\-a-z]*\)",
		"FORMAT_BINDING": ":[A-Z][_A-Z]*",
		"FORMAT_COMMENT": "\s*//[^\n]+",
		"TOKEN_RANGE": "\[((.)-(.)|\\-|[^\\]])+\]",
		"CARDINALITY": "[\?\*\+]",
		"STRING_SQ": "'[^']+'",
		"STRING_DQ": "\"[^\"]*\"",
		"EMPTY_LINE": "\s*\n",
	}
	words = {
		"FORMAT_DEF": ":=",
		"FORMAT_END": ";",
		"FORMAT_PAT": "-->",
		"LP": "(",
		"RP": ")",
		"PIPE": "|",
		"UNDERSCORE": "_",
		"EOL": "\n",
	}
	groups = ("Tree", )
	return ParserUtils.EnsureSymbols(g, tokens, words, groups)
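# NOTE: Same kind of standalone sanity check for two of the format tokens
# above (patterns copied from the table, normalized to raw strings); the
# sample inputs are made up.
import re
_FORMAT_NAME = re.compile(r"[A-Z][_A-Z]*")
_TOKEN_RANGE = re.compile(r"\[((.)-(.)|\-|[^\]])+\]")
assert _FORMAT_NAME.fullmatch("EXPR_LIST")
assert _TOKEN_RANGE.fullmatch("[a-z]")
assert _TOKEN_RANGE.fullmatch("[_0-9A-Za-z]")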
def symbols(g: Grammar) -> Symbols:
	"""Registers tokens and words that are shared by all the grammars defined in this module."""
	s = g.symbols
	tokens = {
		"WS": r"[ \s]+",
		"NUMBER": r"[0-9]+(\.[0-9]+)?",
		"STRING_DQ": r"\"([^\"]*)\"",
		"EXPR_NAME": r"[a-z][\-a-z0-9]*[\!\?]?",
		"EXPR_VARIABLE": r"[_A-Z][\_A-Z0-9]*",
		"EXPR_SYMBOL": r":[A-Za-z][_a-zA-Z0-9]*",
		"EXPR_SINGLETON": r"#[A-Za-z][\-a-zA-Z0-9]*[\!\?]?",
		"EXPR_KEY": r"[A-Za-z][\_a-zA-Z0-9]*:",
		"EXPR_TYPE": r"[A-Z][\_a-zA-Z0-9]*",
		"EXPR_COMMENT": r";+([^\n]*)",
		"REST": r"(\.\.\.)|…",
	}
	words = {
		"LP": "(",
		"RP": ")",
		"COMMA": ",",
		"DOT": ".",
		"QUOTE": "'",
		"PIPE": "|",
		"LB": "{",
		"RB": "}",
	}
	groups = ()
	return ParserUtils.EnsureSymbols(g, tokens, words, groups)
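# NOTE: Standalone check (plain `re`, made-up inputs) that two of the expr
# tokens above behave as their names suggest, including the REST token, which
# is meant to match a literal "..." or the "…" character.
import re
_EXPR_NAME = re.compile(r"[a-z][\-a-z0-9]*[\!\?]?")
_REST = re.compile(r"(\.\.\.)|…")
assert _EXPR_NAME.fullmatch("node-count?")
assert _REST.fullmatch("...")
assert _REST.fullmatch("…")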
def symbols(g: Grammar) -> Symbols:
	"""Registers tokens and words that are shared by all the grammars defined in this module."""
	s = g.symbols
	tokens = {
		"SPACES": "[\s]+",
		"INDENT": "^[\t]*",
	}
	words = {
		"TAB": "\t",
	}
	groups = ()
	return ParserUtils.EnsureSymbols(g, tokens, words, groups)
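# NOTE: A small standalone illustration (not part of the grammar machinery) of
# how the INDENT token above maps a line onto an indentation depth; the input
# line is a made-up example.
import re
_INDENT = re.compile(r"^[\t]*")
line = "\t\tchild-node"
depth = len(_INDENT.match(line).group())
assert depth == 2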
# -----------------------------------------------------------------------------
#
# HIGH-LEVEL API
#
# -----------------------------------------------------------------------------

# TODO: move the core to tlang.utils.GrammarUtils.parse{String|File|Main}(g,tokens,words,groups)

def parseString(text: str, isVerbose=False, process=True):
	return ParserUtils.ParseString(grammar, text, isVerbose,
		processor=QueryProcessor.Get() if process else None)

def parseFile(path: str, isVerbose=False, process=True):
	return ParserUtils.ParseFile(grammar, path, isVerbose,
		processor=QueryProcessor.Get() if process else None)

if __name__ == '__main__':
	ParserUtils.ParseMain(grammar, QueryProcessor.Get())

# EOF - vim: ts=4 sw=4 noet
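# NOTE: Hypothetical usage sketch of the query API above; the query text is an
# invented example, and the shape of each result is determined by
# QueryProcessor (with process=False the parse result is presumably returned
# unprocessed).
def _example_usage():
	processed = parseString("document/section/title", isVerbose=False)
	raw = parseString("document/section/title", process=False)
	return processed, raw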
# -----------------------------------------------------------------------------
#
# HIGH-LEVEL API
#
# -----------------------------------------------------------------------------

# TODO: move the core to tlang.utils.GrammarUtils.parse{String|File|Main}(g,tokens,words,groups)

def parseString(text: str, isVerbose=False, process=True):
	return ParserUtils.ParseString(grammar, text, isVerbose,
		processor=FormatProcessor.Get() if process else None)

def parseFile(path: str, isVerbose=False, process=True):
	return ParserUtils.ParseFile(grammar, path, isVerbose,
		processor=FormatProcessor.Get() if process else None)

if __name__ == '__main__':
	ParserUtils.ParseMain(grammar, FormatProcessor.Get())

# EOF - vim: ts=4 sw=4 noet
def onExprBlock(self, match):
	node = self.tree.node("ex:list")
	lines = self.process(match[0])
	print("LINES", lines)
	# for line in lines:
	# 	node.append(line)
	# children = self.process(match[1])
	# for child in children:
	# 	print ("CHILD", child)
	# 	node.append(child)
	return node

def onExprBlockChild(self, match, child):
	print("BLOCK", child)
	return child

# -----------------------------------------------------------------------------
#
# HIGH-LEVEL API
#
# -----------------------------------------------------------------------------

if __name__ == '__main__':
	G = grammar(expr_grammar())
	print(ParserUtils.ParseMain(lambda isVerbose=False: G, ExprIndentedProcessor.Get()))

# EOF - vim: ts=4 sw=4 noet
# -----------------------------------------------------------------------------
#
# HIGH-LEVEL API
#
# -----------------------------------------------------------------------------

# TODO: move the core to tlang.utils.GrammarUtils.parse{String|File|Main}(g,tokens,words,groups)

def parseString(text: str, isVerbose=False, process=True):
	return ParserUtils.ParseString(grammar, text, isVerbose,
		processor=ExprProcessor.Get() if process else None)

def parseFile(path: str, isVerbose=False, process=True):
	return ParserUtils.ParseFile(grammar, path, isVerbose,
		processor=ExprProcessor.Get() if process else None)

if __name__ == '__main__':
	ParserUtils.ParseMain(grammar, ExprProcessor.Get())

# EOF - vim: ts=4 sw=4 noet