def symbols(g: Grammar) -> Symbols:
    """Registers tokens and words that are shared by all the grammars
    defined in this module."""
    tokens = {
        "WS":                     r"[\s\n]+",
        "NUMBER":                 r"[0-9]+(\.[0-9]+)?",
        "STRING_DQ":              r"\"[^\"]*\"",
        "QUERY_NODE":             r"(([a-z][\-a-z0-9]*):)?([a-z\*\?][\-a-z0-9\*\?]*)",
        "QUERY_ATTRIBUTE":        r"@[a-z\*\?]+[\-a-z0-9\*\?]*",
        "QUERY_VARIABLE":         r"[A-Z][_A-Z0-9]*",
        "QUERY_CURRENT_NODE":     r"\.+",
        "QUERY_SUBSET":           r"#(\d+)",
        "QUERY_AXIS_DESCENDANTS": r"\.?/(\d?/)?",
        "QUERY_AXIS_ANCESTORS":   r"\.?\\(\d?\\)?",
        "QUERY_AXIS_BEFORE":      r"\.?>(\d?>|-?\=?\+?>)?",
        "QUERY_AXIS_AFTER":       r"\.?<(\d?<|-?\=?\+?<)?",
    }
    words = {
        "LP":              "(",
        "COLON":           ":",
        "RP":              ")",
        "LB":              "{",
        "RB":              "}",
        "LS":              "[",
        "RS":              "]",
        "QUERY_ATTRIBUTE": "@",
        "QUERY_AXIS_SELF": "|",
    }
    groups = ("ExprValue", "ExprValuePrefix")
    return ParserUtils.EnsureSymbols(g, tokens, words, groups)
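
# Illustrative sketch (not part of the module): a quick check, using only the
# standard `re` module, of the kind of input the query tokens above are meant
# to capture. The sample selectors are assumptions inferred from the patterns
# themselves, not taken from the grammar's test suite.
def _example_query_tokens() -> None:
    import re
    # QUERY_NODE accepts an optional namespace prefix and wildcard characters.
    node = r"(([a-z][\-a-z0-9]*):)?([a-z\*\?][\-a-z0-9\*\?]*)"
    for text in ("node", "ns:node-name", "*", "nod?"):
        assert re.fullmatch(node, text)
    # The axis tokens accept an optional leading "." and an optional depth digit.
    descendants = r"\.?/(\d?/)?"
    for text in ("/", "//", ".//", "/2/"):
        assert re.fullmatch(descendants, text)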
def symbols(g: Grammar) -> Symbols:
    """Registers tokens and words that are shared by all the grammars
    defined in this module."""
    tokens = {
        "WS":             r"\s+",
        "FORMAT_NAME":    r"[A-Z][_A-Z]*",
        "FORMAT_VARIANT": r"\([A-Za-z][0-9A-Z\-a-z]*\)",
        "FORMAT_BINDING": r":[A-Z][_A-Z]*",
        "FORMAT_COMMENT": r"\s*//[^\n]+",
        "TOKEN_RANGE":    r"\[((.)-(.)|\-|[^\]])+\]",
        "CARDINALITY":    r"[\?\*\+]",
        "STRING_SQ":      r"'[^']+'",
        "STRING_DQ":      r"\"[^\"]*\"",
        "EMPTY_LINE":     r"\s*\n",
    }
    words = {
        "FORMAT_DEF": ":=",
        "FORMAT_END": ";",
        "FORMAT_PAT": "-->",
        "LP":         "(",
        "RP":         ")",
        "PIPE":       "|",
        "UNDERSCORE": "_",
        "EOL":        "\n",
    }
    groups = ("Tree",)
    return ParserUtils.EnsureSymbols(g, tokens, words, groups)
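
# Illustrative sketch (not part of the module): the format tokens above drive a
# BNF-like notation with definitions of the shape `NAME := ... ;` and patterns
# introduced by `-->`. The samples below are assumptions inferred from the
# token names and regexes, checked with the standard `re` module.
def _example_format_tokens() -> None:
    import re
    # TOKEN_RANGE matches bracketed character classes, including escaped "-".
    token_range = r"\[((.)-(.)|\-|[^\]])+\]"
    for text in ("[a-z]", "[0-9A-F]", r"[\-]"):
        assert re.fullmatch(token_range, text)
    # CARDINALITY is a single repetition marker, as in regular expressions.
    assert re.fullmatch(r"[\?\*\+]", "+")
    # FORMAT_COMMENT is a "//" line comment, possibly preceded by whitespace.
    assert re.fullmatch(r"\s*//[^\n]+", "  // a comment")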
def symbols(g: Grammar) -> Symbols:
    """Registers tokens and words that are shared by all the grammars
    defined in this module."""
    tokens = {
        "WS":             r"[ \s]+",
        "NUMBER":         r"[0-9]+(\.[0-9]+)?",
        "STRING_DQ":      r"\"([^\"]*)\"",
        "EXPR_NAME":      r"[a-z][\-a-z0-9]*[\!\?]?",
        "EXPR_VARIABLE":  r"[_A-Z][\_A-Z0-9]*",
        "EXPR_SYMBOL":    r":[A-Za-z][_a-zA-Z0-9]*",
        "EXPR_SINGLETON": r"#[A-Za-z][\-a-zA-Z0-9]*[\!\?]?",
        "EXPR_KEY":       r"[A-Za-z][\_a-zA-Z0-9]*:",
        "EXPR_TYPE":      r"[A-Z][\_a-zA-Z0-9]*",
        "EXPR_COMMENT":   r";+([^\n]*)",
        # Matches a literal "..." or the ellipsis character.
        "REST":           r"(\.\.\.)|…",
    }
    words = {
        "LP":    "(",
        "RP":    ")",
        "COMMA": ",",
        "DOT":   ".",
        "QUOTE": "'",
        "PIPE":  "|",
        "LB":    "{",
        "RB":    "}",
    }
    groups = ()
    return ParserUtils.EnsureSymbols(g, tokens, words, groups)
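
# Illustrative sketch (not part of the module): a few of the expression tokens
# above, checked with the standard `re` module. The sample atoms are
# assumptions inferred from the patterns, not taken from a real expression file.
def _example_expr_tokens() -> None:
    import re
    # EXPR_NAME allows a trailing "!" or "?", Lisp/Scheme-style.
    assert re.fullmatch(r"[a-z][\-a-z0-9]*[\!\?]?", "list-empty?")
    # EXPR_SYMBOL and EXPR_KEY differ only in where the ":" goes.
    assert re.fullmatch(r":[A-Za-z][_a-zA-Z0-9]*", ":symbol")
    assert re.fullmatch(r"[A-Za-z][\_a-zA-Z0-9]*:", "key:")
    # REST accepts either three dots or the single ellipsis character.
    for text in ("...", "…"):
        assert re.fullmatch(r"(\.\.\.)|…", text)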
def symbols(g: Grammar) -> Symbols:
    """Registers tokens and words that are shared by all the grammars
    defined in this module."""
    tokens = {
        "SPACES": r"[\s]+",
        "INDENT": r"^[\t]*",
    }
    words = {
        "TAB": "\t",
    }
    groups = ()
    return ParserUtils.EnsureSymbols(g, tokens, words, groups)
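
# Illustrative sketch (not part of the module): INDENT is anchored with "^", so
# when exercised with the standard `re` module it only matches at the start of
# each line under re.MULTILINE. The sample text is an assumption.
def _example_indent_token() -> None:
    import re
    text = "a\n\tb\n\t\tc\n"
    indents = re.findall(r"^[\t]*", text, re.MULTILINE)
    # One (possibly empty) run of tabs per line, plus one for the trailing empty line.
    assert indents == ["", "\t", "\t\t", ""]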