Example #1
    def lower_matcher(expr):
        """
        Lower a token matcher to our internals.

        :type expr: liblktlang.GrammarExpr
        :rtype: langkit.lexer.Matcher
        """
        with ctx.lkt_context(expr):
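            # Lkt string literals use JSON-compatible escaping, which is
            # why decoding the matcher's text with json.loads yields the
            # exact character sequence to match.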
            if isinstance(expr, liblktlang.TokenLit):
                return Literal(json.loads(text_as_str(expr)))
            elif isinstance(expr, liblktlang.TokenNoCaseLit):
                return NoCaseLit(json.loads(text_as_str(expr)))
            elif isinstance(expr, liblktlang.TokenPatternLit):
                return Pattern(pattern_as_str(expr))
            else:
                check_source_language(False, 'Invalid lexing expression')
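
For reference, a minimal sketch of the three matcher kinds this lowering can
produce (assuming the standard langkit.lexer imports; the literal values are
illustrative only):

    from langkit.lexer import Literal, NoCaseLit, Pattern

    Literal("=>")        # from a TokenLit: match this exact text
    NoCaseLit("access")  # from a TokenNoCaseLit: match case-insensitively
    Pattern(r"[0-9]+")   # from a TokenPatternLit: match a regular expression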
Example #2
    (Pattern(r"--(.?)+"), Token.Comment),
    (Pattern(r"#(.?)+"), Token.PrepLine),
]
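
# Each rule pairs a matcher with the token to emit: plain (matcher, token)
# tuples for unconditional rules, and Case(...) objects below for rules
# whose result depends on the previous token.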

# These keywords can also appear as attribute names (for example in
# A'Access), so they need special handling: right after a tick, they must
# be emitted as identifiers rather than keywords.
for kw_text, kw_token in [
    ("access", Token.Access),
    ("range", Token.Range),
    ("digits", Token.Digits),
    ("delta", Token.Delta),
    ("mod", Token.Mod),
]:
    rules.append(
        Case(
            NoCaseLit(kw_text),
            Alt(prev_token_cond=(Token.Tick, ),
                send=Token.Identifier,
                match_size=len(kw_text)),
            Alt(send=kw_token, match_size=len(kw_text))))
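
# Illustration (not from the source): with the Case rule above, the same
# word tokenizes differently depending on what precedes it; for "access":
#
#   X'Access   =>  Identifier "X", Tick, Identifier "Access"
#   access T   =>  keyword Access, Identifier "T"
#
# The first Alt fires when the previous token matches prev_token_cond (a
# tick) and sends Token.Identifier; the second Alt is the fallback and
# sends the keyword token.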

rules += [
    # Keywords
    (NoCaseLit("abort"), Token.Abort),
    (NoCaseLit("else"), Token.Else),
    (NoCaseLit("new"), Token.New),
    (NoCaseLit("return"), Token.Return),
    (NoCaseLit("abs"), Token.Abs),
    (NoCaseLit("elsif"), Token.Elsif),
    (NoCaseLit("not"), Token.Not),
    (NoCaseLit("reverse"), Token.Reverse),
Example #3


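# Token is presumably the LexerToken subclass declaring this language's
# token set, defined above (outside this excerpt).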
gpr_lexer = Lexer(Token)

gpr_lexer.add_patterns(
    ("p_string", r"\"(\"\"|[^\n\"])*\""),
    ("digit", r"[0-9]"),
    ("integer", r"({digit}(_?{digit})*)"),
)
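
# Named patterns registered with add_patterns can be referenced from later
# Pattern literals with {name} interpolation, just as "integer" above
# reuses {digit}. A rule using it could look like this (hypothetical; the
# actual numeric rule lies outside this excerpt):
#
#   (Pattern(r"{integer}"), Token.Number)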

gpr_lexer.add_rules(
    (Pattern(r"[ \t\r\n]+"), Token.Whitespace),
    (Pattern(r"--(.?)+"), Token.Comment),
    (NoCaseLit("all"), Token.All),
    (NoCaseLit("abstract"), Token.Abstract),
    (NoCaseLit("at"), Token.At),
    (NoCaseLit("case"), Token.Case),
    (NoCaseLit("end"), Token.End),
    (NoCaseLit("for"), Token.For),
    (NoCaseLit("is"), Token.Is),
    (NoCaseLit("limited"), Token.Limited),
    (NoCaseLit("private"), Token.Private),
    (NoCaseLit("null"), Token.Null),
    (NoCaseLit("others"), Token.Others),
    (NoCaseLit("package"), Token.Package),
    (NoCaseLit("renames"), Token.Renames),
    (NoCaseLit("type"), Token.Type),
    (NoCaseLit("use"), Token.Use),
    (NoCaseLit("pragma"), Token.Pragma),